From 2707bb622429834ab3c4503b17d3f7777a28d68b Mon Sep 17 00:00:00 2001 From: Cloud SDK Librarian Date: Wed, 3 Dec 2025 08:50:46 +0000 Subject: [PATCH] feat: generate libraries --- .librarian/state.yaml | 16 +- .../google/cloud/artifactregistry/__init__.py | 8 + .../cloud/artifactregistry_v1/__init__.py | 8 + .../artifactregistry_v1/gapic_metadata.json | 15 + .../artifact_registry/async_client.py | 106 + .../services/artifact_registry/client.py | 104 + .../artifact_registry/transports/base.py | 15 + .../artifact_registry/transports/grpc.py | 27 + .../transports/grpc_asyncio.py | 32 + .../artifact_registry/transports/rest.py | 215 + .../artifact_registry/transports/rest_base.py | 58 + .../artifactregistry_v1/types/__init__.py | 8 + .../cloud/artifactregistry_v1/types/export.py | 156 + ...artifact_registry_export_artifact_async.py | 59 + ..._artifact_registry_export_artifact_sync.py | 59 + ...a_google.devtools.artifactregistry.v1.json | 153 + .../fixup_artifactregistry_v1_keywords.py | 1 + .../test_artifact_registry.py | 567 ++ .../cloud/devtools/cloudbuild/__init__.py | 6 + .../cloud/devtools/cloudbuild_v1/__init__.py | 6 + .../cloudbuild_v1/gapic_metadata.json | 15 + .../services/cloud_build/async_client.py | 229 +- .../services/cloud_build/client.py | 242 +- .../services/cloud_build/transports/base.py | 17 + .../services/cloud_build/transports/grpc.py | 45 +- .../cloud_build/transports/grpc_asyncio.py | 51 +- .../services/cloud_build/transports/rest.py | 219 + .../cloud_build/transports/rest_base.py | 47 + .../devtools/cloudbuild_v1/types/__init__.py | 6 + .../cloudbuild_v1/types/cloudbuild.py | 238 +- ...build_get_default_service_account_async.py | 53 + ..._build_get_default_service_account_sync.py | 53 + ...etadata_google.devtools.cloudbuild.v1.json | 217 + .../scripts/fixup_cloudbuild_v1_keywords.py | 1 + .../gapic/cloudbuild_v1/test_cloud_build.py | 1265 ++- .../google/cloud/channel/__init__.py | 4 + .../google/cloud/channel_v1/__init__.py | 4 + 
.../cloud_channel_service/async_client.py | 23 +- .../services/cloud_channel_service/client.py | 36 +- .../cloud_channel_service/transports/grpc.py | 16 +- .../transports/grpc_asyncio.py | 16 +- .../google/cloud/channel_v1/types/__init__.py | 4 + .../cloud/channel_v1/types/customers.py | 10 +- .../cloud/channel_v1/types/entitlements.py | 10 + .../google/cloud/channel_v1/types/offers.py | 89 + .../cloud/channel_v1/types/repricing.py | 4 +- .../google/cloud/channel_v1/types/service.py | 103 +- .../channel_v1/types/subscriber_event.py | 5 +- ..._channel_service_list_subscribers_async.py | 4 +- ...d_channel_service_list_subscribers_sync.py | 4 +- ...annel_service_register_subscriber_async.py | 1 - ...hannel_service_register_subscriber_sync.py | 1 - ...nel_service_unregister_subscriber_async.py | 1 - ...nnel_service_unregister_subscriber_sync.py | 1 - ...ppet_metadata_google.cloud.channel.v1.json | 84 +- .../scripts/fixup_channel_v1_keywords.py | 8 +- .../channel_v1/test_cloud_channel_service.py | 145 +- .../google/cloud/gke_multicloud/__init__.py | 6 + .../cloud/gke_multicloud_v1/__init__.py | 6 + .../services/aws_clusters/async_client.py | 78 + .../services/aws_clusters/client.py | 67 + .../services/azure_clusters/async_client.py | 96 + .../services/azure_clusters/client.py | 85 + .../cloud/gke_multicloud_v1/types/__init__.py | 6 + .../types/attached_resources.py | 190 +- .../gke_multicloud_v1/types/aws_resources.py | 2 +- .../types/common_resources.py | 4 +- .../test_attached_clusters.py | 22 + .../dns_threat_detector_service.rst | 10 + .../network_security_v1beta1/services_.rst | 1 + .../network_security_v1beta1/__init__.py | 22 + .../gapic_metadata.json | 94 + .../dns_threat_detector_service/__init__.py | 22 + .../async_client.py | 1596 ++++ .../dns_threat_detector_service/client.py | 2056 +++++ .../dns_threat_detector_service/pagers.py | 201 + .../transports/README.rst | 9 + .../transports/__init__.py | 41 + .../transports/base.py | 371 + .../transports/grpc.py | 
678 ++ .../transports/grpc_asyncio.py | 766 ++ .../transports/rest.py | 2712 ++++++ .../transports/rest_base.py | 629 ++ .../types/__init__.py | 16 + .../types/dns_threat_detector.py | 262 + ...ervice_create_dns_threat_detector_async.py | 57 + ...service_create_dns_threat_detector_sync.py | 57 + ...ervice_delete_dns_threat_detector_async.py | 50 + ...service_delete_dns_threat_detector_sync.py | 50 + ...r_service_get_dns_threat_detector_async.py | 53 + ...or_service_get_dns_threat_detector_sync.py | 53 + ...service_list_dns_threat_detectors_async.py | 54 + ..._service_list_dns_threat_detectors_sync.py | 54 + ...ervice_update_dns_threat_detector_async.py | 56 + ...service_update_dns_threat_detector_sync.py | 56 + ..._google.cloud.networksecurity.v1beta1.json | 823 ++ ...fixup_network_security_v1beta1_keywords.py | 5 + .../test_dns_threat_detector_service.py | 8213 +++++++++++++++++ .../google/cloud/parallelstore_v1/__init__.py | 2 + .../cloud/parallelstore_v1/types/__init__.py | 2 + .../parallelstore_v1/types/parallelstore.py | 151 +- .../fixup_parallelstore_v1_keywords.py | 4 +- .../merchant_accounts_v1/types/regions.py | 61 + .../test_regions_service.py | 13 + 104 files changed, 24271 insertions(+), 481 deletions(-) create mode 100644 packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/types/export.py create mode 100644 packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_export_artifact_async.py create mode 100644 packages/google-cloud-artifact-registry/samples/generated_samples/artifactregistry_v1_generated_artifact_registry_export_artifact_sync.py create mode 100644 packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_async.py create mode 100644 packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_sync.py create mode 100644 
packages/google-cloud-network-security/docs/network_security_v1beta1/dns_threat_detector_service.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/async_client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/client.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/pagers.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/README.rst create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/__init__.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest_base.py create mode 100644 packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/dns_threat_detector.py create mode 100644 
packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py create mode 100644 packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py create mode 100644 packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_dns_threat_detector_service.py diff --git a/.librarian/state.yaml b/.librarian/state.yaml index 
1bcd77bfc4f5..26b2f87d9359 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -473,7 +473,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-artifact-registry version: 1.17.0 - last_generated_commit: c2db528a3e4d12b95666c719ee0db30a3d4c78ad + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: google/devtools/artifactregistry/v1 service_config: artifactregistry_v1.yaml @@ -990,7 +990,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-build version: 3.33.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: google/devtools/cloudbuild/v1 service_config: cloudbuild_v1.yaml @@ -1043,7 +1043,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-channel version: 1.24.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: google/cloud/channel/v1 service_config: cloudchannel_v1.yaml @@ -1939,7 +1939,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-gke-multicloud version: 0.6.22 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: google/cloud/gkemulticloud/v1 service_config: gkemulticloud_v1.yaml @@ -2479,7 +2479,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-security version: 0.9.21 - last_generated_commit: c9ff4f1cd26f1fe63e6d1c11a198366b70ebdb84 + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: google/cloud/networksecurity/v1alpha1 service_config: networksecurity_v1alpha1.yaml @@ -2651,7 +2651,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-parallelstore version: 0.3.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: 
google/cloud/parallelstore/v1 service_config: parallelstore_v1.yaml @@ -3962,7 +3962,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-accounts version: 1.1.0 - last_generated_commit: a17b84add8318f780fcc8a027815d5fee644b9f7 + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: google/shopping/merchant/accounts/v1 service_config: merchantapi_v1.yaml @@ -4244,7 +4244,7 @@ libraries: tag_format: '{id}-v{version}' - id: googleapis-common-protos version: 1.72.0 - last_generated_commit: c9ff4f1cd26f1fe63e6d1c11a198366b70ebdb84 + last_generated_commit: 535d161c24965e9ed1a0b27032cc1c8b4beab818 apis: - path: google/api service_config: serviceconfig.yaml diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py index b32c7cb01116..2d1c0622cea7 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry/__init__.py @@ -59,6 +59,11 @@ ListAttachmentsRequest, ListAttachmentsResponse, ) +from google.cloud.artifactregistry_v1.types.export import ( + ExportArtifactMetadata, + ExportArtifactRequest, + ExportArtifactResponse, +) from google.cloud.artifactregistry_v1.types.file import ( DeleteFileRequest, File, @@ -175,6 +180,9 @@ "GetAttachmentRequest", "ListAttachmentsRequest", "ListAttachmentsResponse", + "ExportArtifactMetadata", + "ExportArtifactRequest", + "ExportArtifactResponse", "DeleteFileRequest", "File", "GetFileRequest", diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py index 4304c00bb99b..acd669d4c020 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py +++ 
b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/__init__.py @@ -57,6 +57,11 @@ ListAttachmentsRequest, ListAttachmentsResponse, ) +from .types.export import ( + ExportArtifactMetadata, + ExportArtifactRequest, + ExportArtifactResponse, +) from .types.file import ( DeleteFileRequest, File, @@ -163,6 +168,9 @@ "DeleteTagRequest", "DeleteVersionRequest", "DockerImage", + "ExportArtifactMetadata", + "ExportArtifactRequest", + "ExportArtifactResponse", "File", "GenericArtifact", "GetAttachmentRequest", diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json index 7824e41e78fd..497bb6f71894 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/gapic_metadata.json @@ -70,6 +70,11 @@ "delete_version" ] }, + "ExportArtifact": { + "methods": [ + "export_artifact" + ] + }, "GetAttachment": { "methods": [ "get_attachment" @@ -320,6 +325,11 @@ "delete_version" ] }, + "ExportArtifact": { + "methods": [ + "export_artifact" + ] + }, "GetAttachment": { "methods": [ "get_attachment" @@ -570,6 +580,11 @@ "delete_version" ] }, + "ExportArtifact": { + "methods": [ + "export_artifact" + ] + }, "GetAttachment": { "methods": [ "get_attachment" diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py index 2bad4292b951..a9848db8fbe6 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/async_client.py @@ -61,6 +61,7 @@ from 
google.cloud.artifactregistry_v1.types import apt_artifact, artifact from google.cloud.artifactregistry_v1.types import attachment from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import export from google.cloud.artifactregistry_v1.types import file from google.cloud.artifactregistry_v1.types import file as gda_file from google.cloud.artifactregistry_v1.types import package @@ -6278,6 +6279,111 @@ async def sample_delete_attachment(): # Done; return the response. return response + async def export_artifact( + self, + request: Optional[Union[export.ExportArtifactRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports an artifact. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + async def sample_export_artifact(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryAsyncClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ExportArtifactRequest( + source_version="source_version_value", + gcs_path="gcs_path_value", + repository="repository_value", + ) + + # Make the request + operation = client.export_artifact(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.artifactregistry_v1.types.ExportArtifactRequest, dict]]): + The request object. The request for exporting an artifact + to a destination. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.artifactregistry_v1.types.ExportArtifactResponse` + The response for exporting an artifact to a destination. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, export.ExportArtifactRequest): + request = export.ExportArtifactRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_artifact + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("repository", request.repository),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + export.ExportArtifactResponse, + metadata_type=export.ExportArtifactMetadata, + ) + + # Done; return the response. + return response + async def get_operation( self, request: Optional[operations_pb2.GetOperationRequest] = None, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py index 77872669f101..a1663571d77b 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/client.py @@ -78,6 +78,7 @@ from google.cloud.artifactregistry_v1.types import apt_artifact, artifact from google.cloud.artifactregistry_v1.types import attachment from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import export from google.cloud.artifactregistry_v1.types import file from google.cloud.artifactregistry_v1.types 
import file as gda_file from google.cloud.artifactregistry_v1.types import package @@ -6873,6 +6874,109 @@ def sample_delete_attachment(): # Done; return the response. return response + def export_artifact( + self, + request: Optional[Union[export.ExportArtifactRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Exports an artifact. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import artifactregistry_v1 + + def sample_export_artifact(): + # Create a client + client = artifactregistry_v1.ArtifactRegistryClient() + + # Initialize request argument(s) + request = artifactregistry_v1.ExportArtifactRequest( + source_version="source_version_value", + gcs_path="gcs_path_value", + repository="repository_value", + ) + + # Make the request + operation = client.export_artifact(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.artifactregistry_v1.types.ExportArtifactRequest, dict]): + The request object. The request for exporting an artifact + to a destination. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.artifactregistry_v1.types.ExportArtifactResponse` + The response for exporting an artifact to a destination. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, export.ExportArtifactRequest): + request = export.ExportArtifactRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_artifact] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("repository", request.repository),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + export.ExportArtifactResponse, + metadata_type=export.ExportArtifactMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ArtifactRegistryClient": return self diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py index 3e1220b0d0bb..0dc724eb96a0 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/base.py @@ -35,6 +35,7 @@ from google.cloud.artifactregistry_v1.types import apt_artifact, artifact from google.cloud.artifactregistry_v1.types import attachment from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import export from google.cloud.artifactregistry_v1.types import file from google.cloud.artifactregistry_v1.types import file as gda_file from google.cloud.artifactregistry_v1.types import package @@ -404,6 +405,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_artifact: gapic_v1.method.wrap_method( + self.export_artifact, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -879,6 +885,15 @@ def delete_attachment( ]: raise NotImplementedError() + @property + def export_artifact( + self, + ) -> Callable[ + [export.ExportArtifactRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_operation( self, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py 
b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py index d0c7bb6d64ac..1db957b7e39c 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc.py @@ -37,6 +37,7 @@ from google.cloud.artifactregistry_v1.types import apt_artifact, artifact from google.cloud.artifactregistry_v1.types import attachment from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import export from google.cloud.artifactregistry_v1.types import file from google.cloud.artifactregistry_v1.types import file as gda_file from google.cloud.artifactregistry_v1.types import package @@ -1658,6 +1659,32 @@ def delete_attachment( ) return self._stubs["delete_attachment"] + @property + def export_artifact( + self, + ) -> Callable[[export.ExportArtifactRequest], operations_pb2.Operation]: + r"""Return a callable for the export artifact method over gRPC. + + Exports an artifact. + + Returns: + Callable[[~.ExportArtifactRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_artifact" not in self._stubs: + self._stubs["export_artifact"] = self._logged_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ExportArtifact", + request_serializer=export.ExportArtifactRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_artifact"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py index 4ab313fad9a1..29b25b12e36e 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/grpc_asyncio.py @@ -40,6 +40,7 @@ from google.cloud.artifactregistry_v1.types import apt_artifact, artifact from google.cloud.artifactregistry_v1.types import attachment from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import export from google.cloud.artifactregistry_v1.types import file from google.cloud.artifactregistry_v1.types import file as gda_file from google.cloud.artifactregistry_v1.types import package @@ -1727,6 +1728,32 @@ def delete_attachment( ) return self._stubs["delete_attachment"] + @property + def export_artifact( + self, + ) -> Callable[[export.ExportArtifactRequest], Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export artifact method over gRPC. + + Exports an artifact. + + Returns: + Callable[[~.ExportArtifactRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_artifact" not in self._stubs: + self._stubs["export_artifact"] = self._logged_channel.unary_unary( + "/google.devtools.artifactregistry.v1.ArtifactRegistry/ExportArtifact", + request_serializer=export.ExportArtifactRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_artifact"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1975,6 +2002,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_artifact: self._wrap_method( + self.export_artifact, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py index 6566b7343e5b..3511c433979d 100644 --- a/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py +++ b/packages/google-cloud-artifact-registry/google/cloud/artifactregistry_v1/services/artifact_registry/transports/rest.py @@ -37,6 +37,7 @@ from google.cloud.artifactregistry_v1.types import apt_artifact, artifact from google.cloud.artifactregistry_v1.types import attachment from google.cloud.artifactregistry_v1.types import attachment as gda_attachment +from google.cloud.artifactregistry_v1.types import export from google.cloud.artifactregistry_v1.types import file from google.cloud.artifactregistry_v1.types import file as gda_file 
from google.cloud.artifactregistry_v1.types import export


def pre_export_artifact(
    self,
    request: export.ExportArtifactRequest,
    metadata: Sequence[Tuple[str, Union[str, bytes]]],
) -> Tuple[export.ExportArtifactRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
    """Pre-rpc interceptor for export_artifact.

    Override in a subclass to manipulate the request or metadata
    before they are sent to the ArtifactRegistry server.

    Default implementation is a pass-through: it returns the request
    and metadata unchanged.
    """
    return request, metadata


def post_export_artifact(
    self, response: operations_pb2.Operation
) -> operations_pb2.Operation:
    """Post-rpc interceptor for export_artifact.

    DEPRECATED. Please use the `post_export_artifact_with_metadata`
    interceptor instead.

    Override in a subclass to read or manipulate the response
    after it is returned by the ArtifactRegistry server but before
    it is returned to user code. This `post_export_artifact` interceptor runs
    before the `post_export_artifact_with_metadata` interceptor.
    """
    return response


def post_export_artifact_with_metadata(
    self,
    response: operations_pb2.Operation,
    metadata: Sequence[Tuple[str, Union[str, bytes]]],
) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]:
    """Post-rpc interceptor for export_artifact.

    Override in a subclass to read or manipulate the response or metadata after it
    is returned by the ArtifactRegistry server but before it is returned to user code.

    We recommend only using this `post_export_artifact_with_metadata`
    interceptor in new development instead of the `post_export_artifact` interceptor.
    When both interceptors are used, this `post_export_artifact_with_metadata`
    interceptor runs after the `post_export_artifact` interceptor. The (possibly
    modified) response returned by `post_export_artifact` will be passed to
    `post_export_artifact_with_metadata`.
    """
    return response, metadata
class _ExportArtifact(
    _BaseArtifactRegistryRestTransport._BaseExportArtifact, ArtifactRegistryRestStub
):
    """REST stub for the ExportArtifact RPC (returns a long-running Operation)."""

    def __hash__(self):
        return hash("ArtifactRegistryRestTransport.ExportArtifact")

    @staticmethod
    def _get_response(
        host,
        metadata,
        query_params,
        session,
        timeout,
        transcoded_request,
        body=None,
    ):
        # Issue the transcoded HTTP request through the authorized session.
        uri = transcoded_request["uri"]
        method = transcoded_request["method"]
        headers = dict(metadata)
        headers["Content-Type"] = "application/json"
        response = getattr(session, method)(
            "{host}{uri}".format(host=host, uri=uri),
            timeout=timeout,
            headers=headers,
            params=rest_helpers.flatten_query_params(query_params, strict=True),
            data=body,
        )
        return response

    def __call__(
        self,
        request: export.ExportArtifactRequest,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Optional[float] = None,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> operations_pb2.Operation:
        r"""Call the export artifact method over HTTP.

        Args:
            request (~.export.ExportArtifactRequest):
                The request object. The request for exporting an artifact
                to a destination.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            ~.operations_pb2.Operation:
                This resource represents a
                long-running operation that is the
                result of a network API call.

        """

        http_options = (
            _BaseArtifactRegistryRestTransport._BaseExportArtifact._get_http_options()
        )

        request, metadata = self._interceptor.pre_export_artifact(request, metadata)
        transcoded_request = _BaseArtifactRegistryRestTransport._BaseExportArtifact._get_transcoded_request(
            http_options, request
        )

        body = _BaseArtifactRegistryRestTransport._BaseExportArtifact._get_request_body_json(
            transcoded_request
        )

        # Jsonify the query params
        query_params = _BaseArtifactRegistryRestTransport._BaseExportArtifact._get_query_params_json(
            transcoded_request
        )

        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
            logging.DEBUG
        ):  # pragma: NO COVER
            request_url = "{host}{uri}".format(
                host=self._host, uri=transcoded_request["uri"]
            )
            method = transcoded_request["method"]
            try:
                request_payload = json_format.MessageToJson(request)
            # Narrowed from a bare `except:` (E722): a bare clause would also
            # swallow KeyboardInterrupt/SystemExit. Logging stays best-effort.
            except Exception:
                request_payload = None
            http_request = {
                "payload": request_payload,
                "requestMethod": method,
                "requestUrl": request_url,
                "headers": dict(metadata),
            }
            # Plain string (was an f-string with no placeholders, F541).
            _LOGGER.debug(
                "Sending request for google.devtools.artifactregistry_v1.ArtifactRegistryClient.ExportArtifact",
                extra={
                    "serviceName": "google.devtools.artifactregistry.v1.ArtifactRegistry",
                    "rpcName": "ExportArtifact",
                    "httpRequest": http_request,
                    "metadata": http_request["headers"],
                },
            )

        # Send the request
        response = ArtifactRegistryRestTransport._ExportArtifact._get_response(
            self._host,
            metadata,
            query_params,
            self._session,
            timeout,
            transcoded_request,
            body,
        )

        # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
        # subclass.
        if response.status_code >= 400:
            raise core_exceptions.from_http_response(response)

        # Return the response
        resp = operations_pb2.Operation()
        json_format.Parse(response.content, resp, ignore_unknown_fields=True)

        resp = self._interceptor.post_export_artifact(resp)
        response_metadata = [(k, str(v)) for k, v in response.headers.items()]
        resp, _ = self._interceptor.post_export_artifact_with_metadata(
            resp, response_metadata
        )
        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
            logging.DEBUG
        ):  # pragma: NO COVER
            try:
                response_payload = json_format.MessageToJson(resp)
            # Narrowed from a bare `except:` (E722); see request-side note.
            except Exception:
                response_payload = None
            http_response = {
                "payload": response_payload,
                "headers": dict(response.headers),
                "status": response.status_code,
            }
            _LOGGER.debug(
                "Received response for google.devtools.artifactregistry_v1.ArtifactRegistryClient.export_artifact",
                extra={
                    "serviceName": "google.devtools.artifactregistry.v1.ArtifactRegistry",
                    "rpcName": "ExportArtifact",
                    "metadata": http_response["headers"],
                    "httpResponse": http_response,
                },
            )
        return resp
@property
def export_artifact(
    self,
) -> Callable[[export.ExportArtifactRequest], operations_pb2.Operation]:
    # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
    # In C++ this would require a dynamic_cast
    return self._ExportArtifact(self._session, self._host, self._interceptor)  # type: ignore


class _BaseExportArtifact:
    """Shared REST plumbing for ExportArtifact: HTTP rule, request
    transcoding, and body/query-param JSON serialization."""

    def __hash__(self):  # pragma: NO COVER
        return NotImplementedError("__hash__ must be implemented.")

    # NOTE(review): empty here — ExportArtifact has no query-string fields
    # with required defaults; kept for structural parity with sibling stubs.
    __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}

    @classmethod
    def _get_unset_required_fields(cls, message_dict):
        # Return defaults for required fields the caller did not set.
        return {
            k: v
            for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
            if k not in message_dict
        }

    @staticmethod
    def _get_http_options():
        # HTTP rule: POST custom-method `:exportArtifact` on the repository
        # resource; the whole request message is the body ("*").
        http_options: List[Dict[str, str]] = [
            {
                "method": "post",
                "uri": "/v1/{repository=projects/*/locations/*/repositories/*}:exportArtifact",
                "body": "*",
            },
        ]
        return http_options

    @staticmethod
    def _get_transcoded_request(http_options, request):
        # Map the proto request onto the HTTP rule (URI, method, body, query).
        pb_request = export.ExportArtifactRequest.pb(request)
        transcoded_request = path_template.transcode(http_options, pb_request)
        return transcoded_request

    @staticmethod
    def _get_request_body_json(transcoded_request):
        # Jsonify the request body
        body = json_format.MessageToJson(
            transcoded_request["body"], use_integers_for_enums=True
        )
        return body

    @staticmethod
    def _get_query_params_json(transcoded_request):
        # Jsonify the query params, then fill in any unset required fields.
        query_params = json.loads(
            json_format.MessageToJson(
                transcoded_request["query_params"],
                use_integers_for_enums=True,
            )
        )
        query_params.update(
            _BaseArtifactRegistryRestTransport._BaseExportArtifact._get_unset_required_fields(
                query_params
            )
        )

        query_params["$alt"] = "json;enum-encoding=int"
        return query_params


# Additions for google/cloud/artifactregistry_v1/types/__init__.py:
from .export import (
    ExportArtifactMetadata,
    ExportArtifactRequest,
    ExportArtifactResponse,
)
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations

from typing import MutableMapping, MutableSequence

import proto  # type: ignore

from google.cloud.artifactregistry_v1.types import file, version

__protobuf__ = proto.module(
    package="google.devtools.artifactregistry.v1",
    manifest={
        "ExportArtifactRequest",
        "ExportArtifactResponse",
        "ExportArtifactMetadata",
    },
)


class ExportArtifactRequest(proto.Message):
    r"""The request for exporting an artifact to a destination.

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same time.
    Setting any member of the oneof automatically clears all other
    members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        source_version (str):
            The artifact version to export.
            Format:

            projects/{project}/locations/{location}/repositories/{repository}/packages/{package}/versions/{version}

            This field is a member of `oneof`_ ``source_artifact``.
        source_tag (str):
            The artifact tag to export.
            Format:

            projects/{project}/locations/{location}/repositories/{repository}/packages/{package}/tags/{tag}

            This field is a member of `oneof`_ ``source_artifact``.
        gcs_path (str):
            The Cloud Storage path to export the artifact to. Should
            start with the bucket name, and optionally have a directory
            path. Examples: ``dst_bucket``, ``dst_bucket/sub_dir``.
            Existing objects with the same path will be overwritten.

            This field is a member of `oneof`_ ``destination``.
        repository (str):
            Required. The repository of the artifact to
            export. Format:
            projects/{project}/locations/{location}/repositories/{repository}
    """

    source_version: str = proto.Field(
        proto.STRING,
        number=2,
        oneof="source_artifact",
    )
    source_tag: str = proto.Field(
        proto.STRING,
        number=4,
        oneof="source_artifact",
    )
    gcs_path: str = proto.Field(
        proto.STRING,
        number=3,
        oneof="destination",
    )
    repository: str = proto.Field(
        proto.STRING,
        number=1,
    )


class ExportArtifactResponse(proto.Message):
    r"""The response for exporting an artifact to a destination.

    Attributes:
        exported_version (google.cloud.artifactregistry_v1.types.Version):
            The exported version. Should be the same as
            the request version with fingerprint resource
            name.
    """

    exported_version: version.Version = proto.Field(
        proto.MESSAGE,
        number=1,
        message=version.Version,
    )


class ExportArtifactMetadata(proto.Message):
    r"""The LRO metadata for exporting an artifact.

    Attributes:
        exported_files (MutableSequence[google.cloud.artifactregistry_v1.types.ExportArtifactMetadata.ExportedFile]):
            The exported artifact files.
    """

    class ExportedFile(proto.Message):
        r"""The exported artifact file.

        .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

        Attributes:
            gcs_object_path (str):
                Cloud Storage Object path of the exported file. Examples:
                ``dst_bucket/file1``, ``dst_bucket/sub_dir/file1``

                This field is a member of `oneof`_ ``destination``.
            name (str):
                Name of the exported artifact file. Format:
                ``projects/p1/locations/us/repositories/repo1/files/file1``
            hashes (MutableSequence[google.cloud.artifactregistry_v1.types.Hash]):
                The hashes of the file content.
        """

        gcs_object_path: str = proto.Field(
            proto.STRING,
            number=2,
            oneof="destination",
        )
        name: str = proto.Field(
            proto.STRING,
            number=1,
        )
        hashes: MutableSequence[file.Hash] = proto.RepeatedField(
            proto.MESSAGE,
            number=3,
            message=file.Hash,
        )

    exported_files: MutableSequence[ExportedFile] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=ExportedFile,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
#
# Snippet for ExportArtifact
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.

# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-artifact-registry


# [START artifactregistry_v1_generated_ArtifactRegistry_ExportArtifact_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import artifactregistry_v1


async def sample_export_artifact():
    # Create a client
    client = artifactregistry_v1.ArtifactRegistryAsyncClient()

    # Initialize request argument(s)
    request = artifactregistry_v1.ExportArtifactRequest(
        source_version="source_version_value",
        gcs_path="gcs_path_value",
        repository="repository_value",
    )

    # Make the request; this starts a long-running operation on the server.
    operation = client.export_artifact(request=request)

    print("Waiting for operation to complete...")

    response = (await operation).result()

    # Handle the response
    print(response)


# [END artifactregistry_v1_generated_ArtifactRegistry_ExportArtifact_async]
Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportArtifact +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-artifact-registry + + +# [START artifactregistry_v1_generated_ArtifactRegistry_ExportArtifact_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
# - It may require specifying regional endpoints when creating the service
#   client as shown in:
#   https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import artifactregistry_v1


def sample_export_artifact():
    # Create a client
    client = artifactregistry_v1.ArtifactRegistryClient()

    # Initialize request argument(s)
    request = artifactregistry_v1.ExportArtifactRequest(
        source_version="source_version_value",
        gcs_path="gcs_path_value",
        repository="repository_value",
    )

    # Make the request; this starts a long-running operation on the server.
    operation = client.export_artifact(request=request)

    print("Waiting for operation to complete...")

    response = operation.result()

    # Handle the response
    print(response)


# [END artifactregistry_v1_generated_ArtifactRegistry_ExportArtifact_sync]
"ArtifactRegistry" + }, + "shortName": "ExportArtifact" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ExportArtifactRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_artifact" + }, + "description": "Sample for ExportArtifact", + "file": "artifactregistry_v1_generated_artifact_registry_export_artifact_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ExportArtifact_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_export_artifact_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient", + "shortName": "ArtifactRegistryClient" + }, + "fullName": "google.cloud.artifactregistry_v1.ArtifactRegistryClient.export_artifact", + "method": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry.ExportArtifact", + "service": { + "fullName": "google.devtools.artifactregistry.v1.ArtifactRegistry", + "shortName": "ArtifactRegistry" + }, + "shortName": "ExportArtifact" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.artifactregistry_v1.types.ExportArtifactRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_artifact" + }, + "description": "Sample for ExportArtifact", + "file": "artifactregistry_v1_generated_artifact_registry_export_artifact_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "artifactregistry_v1_generated_ArtifactRegistry_ExportArtifact_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 54, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "start": 55, + "type": "RESPONSE_HANDLING" + } + ], + "title": "artifactregistry_v1_generated_artifact_registry_export_artifact_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py b/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py index 24d9f9d27767..9c0fb04603e7 100644 --- a/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py +++ b/packages/google-cloud-artifact-registry/scripts/fixup_artifactregistry_v1_keywords.py @@ -51,6 +51,7 @@ class artifactregistryCallTransformer(cst.CSTTransformer): 'delete_rule': ('name', ), 'delete_tag': ('name', ), 'delete_version': ('name', 'force', ), + 'export_artifact': ('repository', 'source_version', 'source_tag', 'gcs_path', ), 'get_attachment': ('name', ), 'get_docker_image': ('name', ), 'get_file': ('name', ), diff --git a/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py b/packages/google-cloud-artifact-registry/tests/unit/gapic/artifactregistry_v1/test_artifact_registry.py index 
from google.cloud.artifactregistry_v1.types import export


@pytest.mark.parametrize(
    "request_type",
    [
        export.ExportArtifactRequest,
        dict,
    ],
)
def test_export_artifact(request_type, transport: str = "grpc"):
    """ExportArtifact over gRPC returns a long-running-operation future."""
    client = ArtifactRegistryClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.export_artifact), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.export_artifact(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        request = export.ExportArtifactRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


def test_export_artifact_non_empty_request_with_auto_populated_field():
    """Non-empty requests pass through unchanged (AIP-4235 UUID4 failsafe)."""
    # This test is a coverage failsafe to make sure that UUID4 fields are
    # automatically populated, according to AIP-4235, with non-empty requests.
    client = ArtifactRegistryClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Populate all string fields in the request which are not UUID4
    # since we want to check that UUID4 are populated automatically
    # if they meet the requirements of AIP 4235.
    request = export.ExportArtifactRequest(
        source_version="source_version_value",
        source_tag="source_tag_value",
        gcs_path="gcs_path_value",
        repository="repository_value",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.export_artifact), "__call__") as call:
        call.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client.export_artifact(request=request)
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == export.ExportArtifactRequest(
            source_version="source_version_value",
            source_tag="source_tag_value",
            gcs_path="gcs_path_value",
            repository="repository_value",
        )


def test_export_artifact_use_cached_wrapped_rpc():
    """Repeated calls reuse the wrapped RPC cached at client creation."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = ArtifactRegistryClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="grpc",
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert client._transport.export_artifact in client._transport._wrapped_methods

        # Replace cached wrapped function with mock
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = (
            "foo"  # operation_request.operation in compute client(s) expect a string.
        )
        client._transport._wrapped_methods[client._transport.export_artifact] = mock_rpc
        request = {}
        client.export_artifact(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods call wrapper_fn to build a cached
        # client._transport.operations_client instance on first rpc call.
        # Subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        client.export_artifact(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_export_artifact_async_use_cached_wrapped_rpc(
    transport: str = "grpc_asyncio",
):
    """Async variant: repeated calls reuse the cached wrapped RPC."""
    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
    # instead of constructing them on each call
    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
        client = ArtifactRegistryAsyncClient(
            credentials=async_anonymous_credentials(),
            transport=transport,
        )

        # Should wrap all calls on client creation
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # Ensure method has been cached
        assert (
            client._client._transport.export_artifact
            in client._client._transport._wrapped_methods
        )

        # Replace cached wrapped function with mock
        mock_rpc = mock.AsyncMock()
        mock_rpc.return_value = mock.Mock()
        client._client._transport._wrapped_methods[
            client._client._transport.export_artifact
        ] = mock_rpc

        request = {}
        await client.export_artifact(request)

        # Establish that the underlying gRPC stub method was called.
        assert mock_rpc.call_count == 1

        # Operation methods call wrapper_fn to build a cached
        # client._transport.operations_client instance on first rpc call.
        # Subsequent calls should use the cached wrapper
        wrapper_fn.reset_mock()

        await client.export_artifact(request)

        # Establish that a new wrapper was not created for this call
        assert wrapper_fn.call_count == 0
        assert mock_rpc.call_count == 2


@pytest.mark.asyncio
async def test_export_artifact_async(
    transport: str = "grpc_asyncio", request_type=export.ExportArtifactRequest
):
    """Async ExportArtifact returns a long-running-operation future."""
    client = ArtifactRegistryAsyncClient(
        credentials=async_anonymous_credentials(),
        transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.export_artifact), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.export_artifact(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        request = export.ExportArtifactRequest()
        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)


@pytest.mark.asyncio
async def test_export_artifact_async_from_dict():
    """Async ExportArtifact accepts a plain dict request."""
    await test_export_artifact_async(request_type=dict)


def test_export_artifact_field_headers():
    """Routing header `x-goog-request-params` carries the repository field."""
    client = ArtifactRegistryClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = export.ExportArtifactRequest()

    request.repository = "repository_value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.export_artifact), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.export_artifact(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        "x-goog-request-params",
        "repository=repository_value",
    ) in kw["metadata"]
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "repository=repository_value", + ) in kw["metadata"] + + def test_list_docker_images_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -27897,6 +28149,128 @@ def test_delete_attachment_rest_flattened_error(transport: str = "rest"): ) +def test_export_artifact_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_artifact in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_artifact] = mock_rpc + + request = {} + client.export_artifact(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_artifact(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_export_artifact_rest_required_fields( + request_type=export.ExportArtifactRequest, +): + transport_class = transports.ArtifactRegistryRestTransport + + request_init = {} + request_init["repository"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_artifact._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["repository"] = "repository_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_artifact._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "repository" in jsonified_request + assert jsonified_request["repository"] == "repository_value" + + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.export_artifact(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_export_artifact_rest_unset_required_fields(): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.export_artifact._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("repository",))) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ArtifactRegistryGrpcTransport( @@ -29070,6 +29444,27 @@ def test_delete_attachment_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_export_artifact_empty_call_grpc(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_artifact), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_artifact(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = export.ExportArtifactRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = ArtifactRegistryAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -30485,6 +30880,31 @@ async def test_delete_attachment_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_artifact_empty_call_grpc_asyncio(): + client = ArtifactRegistryAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_artifact), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.export_artifact(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = export.ExportArtifactRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = ArtifactRegistryClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -37889,6 +38309,129 @@ def test_delete_attachment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_export_artifact_rest_bad_request(request_type=export.ExportArtifactRequest): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "repository": "projects/sample1/locations/sample2/repositories/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.export_artifact(request) + + +@pytest.mark.parametrize( + "request_type", + [ + export.ExportArtifactRequest, + dict, + ], +) +def test_export_artifact_rest_call_success(request_type): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "repository": "projects/sample1/locations/sample2/repositories/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.export_artifact(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_artifact_rest_interceptors(null_interceptor): + transport = transports.ArtifactRegistryRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ArtifactRegistryRestInterceptor(), + ) + client = ArtifactRegistryClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_export_artifact" + ) as post, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "post_export_artifact_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ArtifactRegistryRestInterceptor, "pre_export_artifact" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = export.ExportArtifactRequest.pb(export.ExportArtifactRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = export.ExportArtifactRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.export_artifact( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -39096,6 +39639,26 @@ def test_delete_attachment_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_artifact_empty_call_rest(): + client = ArtifactRegistryClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_artifact), "__call__") as call: + client.export_artifact(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = export.ExportArtifactRequest() + + assert args[0] == request_msg + + def test_artifact_registry_rest_lro_client(): client = ArtifactRegistryClient( credentials=ga_credentials.AnonymousCredentials(), @@ -39195,6 +39758,7 @@ def test_artifact_registry_base_transport(): "get_attachment", "create_attachment", "delete_attachment", + "export_artifact", "get_location", "list_locations", "get_operation", @@ -39620,6 +40184,9 @@ def test_artifact_registry_client_transport_session_collision(transport_name): session1 = client1.transport.delete_attachment._session session2 = client2.transport.delete_attachment._session assert session1 != session2 + session1 = client1.transport.export_artifact._session + session2 = client2.transport.export_artifact._session + assert session1 != session2 def test_artifact_registry_grpc_transport_channel(): diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py index eccb6ff7970f..ecd6456bd752 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild/__init__.py @@ -38,10 +38,12 @@ BuildTrigger, BuiltImage, CancelBuildRequest, + ConnectedRepository, CreateBuildRequest, CreateBuildTriggerRequest, CreateWorkerPoolOperationMetadata, CreateWorkerPoolRequest, + DefaultServiceAccount, DeleteBuildTriggerRequest, DeleteWorkerPoolOperationMetadata, DeleteWorkerPoolRequest, @@ -49,6 +51,7 @@ FileHashes, GetBuildRequest, GetBuildTriggerRequest, + GetDefaultServiceAccountRequest, GetWorkerPoolRequest, GitConfig, GitFileSource, @@ -112,10 +115,12 @@ "BuildTrigger", "BuiltImage", "CancelBuildRequest", + "ConnectedRepository", "CreateBuildRequest", "CreateBuildTriggerRequest", "CreateWorkerPoolOperationMetadata", "CreateWorkerPoolRequest", + "DefaultServiceAccount", 
"DeleteBuildTriggerRequest", "DeleteWorkerPoolOperationMetadata", "DeleteWorkerPoolRequest", @@ -123,6 +128,7 @@ "FileHashes", "GetBuildRequest", "GetBuildTriggerRequest", + "GetDefaultServiceAccountRequest", "GetWorkerPoolRequest", "GitConfig", "GitFileSource", diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py index 05c4b093e95b..70e9881398bd 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/__init__.py @@ -33,10 +33,12 @@ BuildTrigger, BuiltImage, CancelBuildRequest, + ConnectedRepository, CreateBuildRequest, CreateBuildTriggerRequest, CreateWorkerPoolOperationMetadata, CreateWorkerPoolRequest, + DefaultServiceAccount, DeleteBuildTriggerRequest, DeleteWorkerPoolOperationMetadata, DeleteWorkerPoolRequest, @@ -44,6 +46,7 @@ FileHashes, GetBuildRequest, GetBuildTriggerRequest, + GetDefaultServiceAccountRequest, GetWorkerPoolRequest, GitConfig, GitFileSource, @@ -107,10 +110,12 @@ "BuiltImage", "CancelBuildRequest", "CloudBuildClient", + "ConnectedRepository", "CreateBuildRequest", "CreateBuildTriggerRequest", "CreateWorkerPoolOperationMetadata", "CreateWorkerPoolRequest", + "DefaultServiceAccount", "DeleteBuildTriggerRequest", "DeleteWorkerPoolOperationMetadata", "DeleteWorkerPoolRequest", @@ -118,6 +123,7 @@ "FileHashes", "GetBuildRequest", "GetBuildTriggerRequest", + "GetDefaultServiceAccountRequest", "GetWorkerPoolRequest", "GitConfig", "GitFileSource", diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json index 2648fd2465f5..0b968ec87869 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json +++ 
b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json @@ -55,6 +55,11 @@ "get_build_trigger" ] }, + "GetDefaultServiceAccount": { + "methods": [ + "get_default_service_account" + ] + }, "GetWorkerPool": { "methods": [ "get_worker_pool" @@ -150,6 +155,11 @@ "get_build_trigger" ] }, + "GetDefaultServiceAccount": { + "methods": [ + "get_default_service_account" + ] + }, "GetWorkerPool": { "methods": [ "get_worker_pool" @@ -245,6 +255,11 @@ "get_build_trigger" ] }, + "GetDefaultServiceAccount": { + "methods": [ + "get_default_service_account" + ] + }, "GetWorkerPool": { "methods": [ "get_worker_pool" diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index c2a8c78587ee..8cb10192be43 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -94,6 +94,12 @@ class CloudBuildAsyncClient: parse_build_trigger_path = staticmethod(CloudBuildClient.parse_build_trigger_path) crypto_key_path = staticmethod(CloudBuildClient.crypto_key_path) parse_crypto_key_path = staticmethod(CloudBuildClient.parse_crypto_key_path) + default_service_account_path = staticmethod( + CloudBuildClient.default_service_account_path + ) + parse_default_service_account_path = staticmethod( + CloudBuildClient.parse_default_service_account_path + ) github_enterprise_config_path = staticmethod( CloudBuildClient.github_enterprise_config_path ) @@ -336,6 +342,7 @@ async def create_build( *, project_id: Optional[str] = None, build: Optional[cloudbuild.Build] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = 
(), @@ -389,6 +396,13 @@ async def sample_create_build(): This corresponds to the ``build`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + parent (:class:`str`): + The parent resource where this build will be created. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -432,7 +446,7 @@ async def sample_create_build(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, build] + flattened_params = [project_id, build, parent] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -453,6 +467,8 @@ async def sample_create_build(): request.project_id = project_id if build is not None: request.build = build + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -502,6 +518,7 @@ async def get_build( *, project_id: Optional[str] = None, id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), @@ -552,6 +569,13 @@ async def sample_get_build(): This corresponds to the ``id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (:class:`str`): + The name of the ``Build`` to retrieve. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -591,7 +615,7 @@ async def sample_get_build(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, id] + flattened_params = [project_id, id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -612,6 +636,8 @@ async def sample_get_build(): request.project_id = project_id if id is not None: request.id = id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -797,6 +823,7 @@ async def cancel_build( *, project_id: Optional[str] = None, id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), @@ -843,6 +870,13 @@ async def sample_cancel_build(): This corresponds to the ``id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (:class:`str`): + The name of the ``Build`` to cancel. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -882,7 +916,7 @@ async def sample_cancel_build(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [project_id, id] + flattened_params = [project_id, id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -903,6 +937,8 @@ async def sample_cancel_build(): request.project_id = project_id if id is not None: request.id = id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -944,6 +980,7 @@ async def retry_build( *, project_id: Optional[str] = None, id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), @@ -1025,6 +1062,13 @@ async def sample_retry_build(): This corresponds to the ``id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (:class:`str`): + The name of the ``Build`` to retry. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1068,7 +1112,7 @@ async def sample_retry_build(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, id] + flattened_params = [project_id, id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1089,6 +1133,8 @@ async def sample_retry_build(): request.project_id = project_id if id is not None: request.id = id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -1144,8 +1190,9 @@ async def approve_build( ) -> operation_async.AsyncOperation: r"""Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. + If approved, the returned long-running operation (LRO) + will be analogous to the LRO returned from a CreateBuild + call. If rejected, the returned LRO will be immediately done. @@ -1308,14 +1355,13 @@ async def create_build_trigger( *, project_id: Optional[str] = None, trigger: Optional[cloudbuild.BuildTrigger] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloudbuild.BuildTrigger: r"""Creates a new ``BuildTrigger``. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1361,6 +1407,13 @@ async def sample_create_build_trigger(): This corresponds to the ``trigger`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + parent (:class:`str`): + The parent resource where this trigger will be created. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1379,7 +1432,7 @@ async def sample_create_build_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [project_id, trigger] + flattened_params = [project_id, trigger, parent] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1400,6 +1453,8 @@ async def sample_create_build_trigger(): request.project_id = project_id if trigger is not None: request.trigger = trigger + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1441,14 +1496,13 @@ async def get_build_trigger( *, project_id: Optional[str] = None, trigger_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloudbuild.BuildTrigger: r"""Returns information about a ``BuildTrigger``. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1493,6 +1547,13 @@ async def sample_get_build_trigger(): This corresponds to the ``trigger_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (:class:`str`): + The name of the ``Trigger`` to retrieve. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1511,7 +1572,7 @@ async def sample_get_build_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [project_id, trigger_id] + flattened_params = [project_id, trigger_id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1532,6 +1593,8 @@ async def sample_get_build_trigger(): request.project_id = project_id if trigger_id is not None: request.trigger_id = trigger_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1578,8 +1641,6 @@ async def list_build_triggers( ) -> pagers.ListBuildTriggersAsyncPager: r"""Lists existing ``BuildTrigger``\ s. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1707,14 +1768,13 @@ async def delete_build_trigger( *, project_id: Optional[str] = None, trigger_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1754,6 +1814,13 @@ async def sample_delete_build_trigger(): This corresponds to the ``trigger_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (:class:`str`): + The name of the ``Trigger`` to delete. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1765,7 +1832,7 @@ async def sample_delete_build_trigger(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, trigger_id] + flattened_params = [project_id, trigger_id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1786,6 +1853,8 @@ async def sample_delete_build_trigger(): request.project_id = project_id if trigger_id is not None: request.trigger_id = trigger_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1831,8 +1900,6 @@ async def update_build_trigger( ) -> cloudbuild.BuildTrigger: r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -2732,8 +2799,8 @@ async def sample_update_worker_pool(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - A mask specifying which fields in ``worker_pool`` to - update. + Optional. A mask specifying which fields in + ``worker_pool`` to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2969,6 +3036,126 @@ async def sample_list_worker_pools(): # Done; return the response. return response + async def get_default_service_account( + self, + request: Optional[ + Union[cloudbuild.GetDefaultServiceAccountRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloudbuild.DefaultServiceAccount: + r"""Returns the ``DefaultServiceAccount`` used by the project. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + async def sample_get_default_service_account(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetDefaultServiceAccountRequest( + name="name_value", + ) + + # Make the request + response = await client.get_default_service_account(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.devtools.cloudbuild_v1.types.GetDefaultServiceAccountRequest, dict]]): + The request object. Returns the default service account that will be used + for ``Builds``. + name (:class:`str`): + Required. The name of the ``DefaultServiceAccount`` to + retrieve. Format: + ``projects/{project}/locations/{location}/defaultServiceAccount`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.DefaultServiceAccount: + The default service account used for Builds. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudbuild.GetDefaultServiceAccountRequest): + request = cloudbuild.GetDefaultServiceAccountRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_default_service_account + ] + + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/defaultServiceAccount$" + ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "CloudBuildAsyncClient": return self diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 117a76522344..34c2866d95ce 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -267,6 +267,26 @@ def parse_crypto_key_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def default_service_account_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified default_service_account string.""" + return "projects/{project}/locations/{location}/defaultServiceAccount".format( + project=project, + location=location, + ) + + @staticmethod + def parse_default_service_account_path(path: str) -> Dict[str, str]: + """Parses a default_service_account path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/defaultServiceAccount$", + path, + ) + return m.groupdict() if m else {} + @staticmethod + def github_enterprise_config_path( + project: str, @@ -972,6 +992,7 @@ def create_build( *, project_id: Optional[str] = None, build: Optional[cloudbuild.Build] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), @@ -1025,6 +1046,13 @@ def sample_create_build(): This corresponds to the ``build`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + parent (str): + The parent resource where this build will be created. 
+ Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1068,7 +1096,7 @@ def sample_create_build(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, build] + flattened_params = [project_id, build, parent] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1088,6 +1116,8 @@ def sample_create_build(): request.project_id = project_id if build is not None: request.build = build + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1135,6 +1165,7 @@ def get_build( *, project_id: Optional[str] = None, id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), @@ -1185,6 +1216,13 @@ def sample_get_build(): This corresponds to the ``id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (str): + The name of the ``Build`` to retrieve. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1224,7 +1262,7 @@ def sample_get_build(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, id] + flattened_params = [project_id, id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1244,6 +1282,8 @@ def sample_get_build(): request.project_id = project_id if id is not None: request.id = id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1424,6 +1464,7 @@ def cancel_build( *, project_id: Optional[str] = None, id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), @@ -1470,6 +1511,13 @@ def sample_cancel_build(): This corresponds to the ``id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (str): + The name of the ``Build`` to cancel. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1509,7 +1557,7 @@ def sample_cancel_build(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [project_id, id] + flattened_params = [project_id, id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1529,6 +1577,8 @@ def sample_cancel_build(): request.project_id = project_id if id is not None: request.id = id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1568,6 +1618,7 @@ def retry_build( *, project_id: Optional[str] = None, id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), @@ -1649,6 +1700,13 @@ def sample_retry_build(): This corresponds to the ``id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (str): + The name of the ``Build`` to retry. Format: + ``projects/{project}/locations/{location}/builds/{build}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1692,7 +1750,7 @@ def sample_retry_build(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, id] + flattened_params = [project_id, id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -1712,6 +1770,8 @@ def sample_retry_build(): request.project_id = project_id if id is not None: request.id = id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -1765,8 +1825,9 @@ def approve_build( ) -> operation.Operation: r"""Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. + If approved, the returned long-running operation (LRO) + will be analogous to the LRO returned from a CreateBuild + call. If rejected, the returned LRO will be immediately done. @@ -1926,14 +1987,13 @@ def create_build_trigger( *, project_id: Optional[str] = None, trigger: Optional[cloudbuild.BuildTrigger] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloudbuild.BuildTrigger: r"""Creates a new ``BuildTrigger``. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -1979,6 +2039,13 @@ def sample_create_build_trigger(): This corresponds to the ``trigger`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + parent (str): + The parent resource where this trigger will be created. + Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1997,7 +2064,7 @@ def sample_create_build_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [project_id, trigger] + flattened_params = [project_id, trigger, parent] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -2017,6 +2084,8 @@ def sample_create_build_trigger(): request.project_id = project_id if trigger is not None: request.trigger = trigger + if parent is not None: + request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2056,14 +2125,13 @@ def get_build_trigger( *, project_id: Optional[str] = None, trigger_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloudbuild.BuildTrigger: r"""Returns information about a ``BuildTrigger``. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -2108,6 +2176,13 @@ def sample_get_build_trigger(): This corresponds to the ``trigger_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (str): + The name of the ``Trigger`` to retrieve. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2126,7 +2201,7 @@ def sample_get_build_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [project_id, trigger_id] + flattened_params = [project_id, trigger_id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -2146,6 +2221,8 @@ def sample_get_build_trigger(): request.project_id = project_id if trigger_id is not None: request.trigger_id = trigger_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2190,8 +2267,6 @@ def list_build_triggers( ) -> pagers.ListBuildTriggersPager: r"""Lists existing ``BuildTrigger``\ s. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -2316,14 +2391,13 @@ def delete_build_trigger( *, project_id: Optional[str] = None, trigger_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -2363,6 +2437,13 @@ def sample_delete_build_trigger(): This corresponds to the ``trigger_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + name (str): + The name of the ``Trigger`` to delete. Format: + ``projects/{project}/locations/{location}/triggers/{trigger}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2374,7 +2455,7 @@ def sample_delete_build_trigger(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [project_id, trigger_id] + flattened_params = [project_id, trigger_id, name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -2394,6 +2475,8 @@ def sample_delete_build_trigger(): request.project_id = project_id if trigger_id is not None: request.trigger_id = trigger_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2437,8 +2520,6 @@ def update_build_trigger( ) -> cloudbuild.BuildTrigger: r"""Updates a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a @@ -3321,8 +3402,8 @@ def sample_update_worker_pool(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask specifying which fields in ``worker_pool`` to - update. + Optional. A mask specifying which fields in + ``worker_pool`` to update. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -3552,6 +3633,125 @@ def sample_list_worker_pools(): # Done; return the response. return response + def get_default_service_account( + self, + request: Optional[ + Union[cloudbuild.GetDefaultServiceAccountRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloudbuild.DefaultServiceAccount: + r"""Returns the ``DefaultServiceAccount`` used by the project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.devtools import cloudbuild_v1 + + def sample_get_default_service_account(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetDefaultServiceAccountRequest( + name="name_value", + ) + + # Make the request + response = client.get_default_service_account(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.devtools.cloudbuild_v1.types.GetDefaultServiceAccountRequest, dict]): + The request object. Returns the default service account that will be used + for ``Builds``. + name (str): + Required. The name of the ``DefaultServiceAccount`` to + retrieve. Format: + ``projects/{project}/locations/{location}/defaultServiceAccount`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.devtools.cloudbuild_v1.types.DefaultServiceAccount: + The default service account used for Builds. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloudbuild.GetDefaultServiceAccountRequest): + request = cloudbuild.GetDefaultServiceAccountRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_default_service_account + ] + + header_params = {} + + routing_param_regex = re.compile( + "^projects/[^/]+/locations/(?P<location>[^/]+)/defaultServiceAccount$" + ) + regex_match = routing_param_regex.match(request.name) + if regex_match and regex_match.group("location"): + header_params["location"] = regex_match.group("location") + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "CloudBuildClient": return self diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py index 95234e49acca..67d1481f83e4 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py @@ -295,6 +295,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_default_service_account: gapic_v1.method.wrap_method( + self.get_default_service_account, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -482,6 +487,18 @@ def list_worker_pools( ]: raise NotImplementedError() + @property + def get_default_service_account( + self, + ) -> Callable[ + [cloudbuild.GetDefaultServiceAccountRequest], + Union[ + cloudbuild.DefaultServiceAccount, + Awaitable[cloudbuild.DefaultServiceAccount], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py index aa6eb228e9c3..8b9de6fbc366 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py @@ -523,8 +523,9 @@ def approve_build( Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. 
+ If approved, the returned long-running operation (LRO) + will be analogous to the LRO returned from a CreateBuild + call. If rejected, the returned LRO will be immediately done. @@ -554,8 +555,6 @@ def create_build_trigger( Creates a new ``BuildTrigger``. - This API is experimental. - Returns: Callable[[~.CreateBuildTriggerRequest], ~.BuildTrigger]: @@ -582,8 +581,6 @@ def get_build_trigger( Returns information about a ``BuildTrigger``. - This API is experimental. - Returns: Callable[[~.GetBuildTriggerRequest], ~.BuildTrigger]: @@ -612,8 +609,6 @@ def list_build_triggers( Lists existing ``BuildTrigger``\ s. - This API is experimental. - Returns: Callable[[~.ListBuildTriggersRequest], ~.ListBuildTriggersResponse]: @@ -640,8 +635,6 @@ def delete_build_trigger( Deletes a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - Returns: Callable[[~.DeleteBuildTriggerRequest], ~.Empty]: @@ -668,8 +661,6 @@ def update_build_trigger( Updates a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - Returns: Callable[[~.UpdateBuildTriggerRequest], ~.BuildTrigger]: @@ -882,6 +873,36 @@ def list_worker_pools( ) return self._stubs["list_worker_pools"] + @property + def get_default_service_account( + self, + ) -> Callable[ + [cloudbuild.GetDefaultServiceAccountRequest], cloudbuild.DefaultServiceAccount + ]: + r"""Return a callable for the get default service account method over gRPC. + + Returns the ``DefaultServiceAccount`` used by the project. + + Returns: + Callable[[~.GetDefaultServiceAccountRequest], + ~.DefaultServiceAccount]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_default_service_account" not in self._stubs: + self._stubs[ + "get_default_service_account" + ] = self._logged_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetDefaultServiceAccount", + request_serializer=cloudbuild.GetDefaultServiceAccountRequest.serialize, + response_deserializer=cloudbuild.DefaultServiceAccount.deserialize, + ) + return self._stubs["get_default_service_account"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 217bacb3c55c..a1dabaff7cb3 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -537,8 +537,9 @@ def approve_build( Approves or rejects a pending build. - If approved, the returned LRO will be analogous to the - LRO returned from a CreateBuild call. + If approved, the returned long-running operation (LRO) + will be analogous to the LRO returned from a CreateBuild + call. If rejected, the returned LRO will be immediately done. @@ -570,8 +571,6 @@ def create_build_trigger( Creates a new ``BuildTrigger``. - This API is experimental. - Returns: Callable[[~.CreateBuildTriggerRequest], Awaitable[~.BuildTrigger]]: @@ -600,8 +599,6 @@ def get_build_trigger( Returns information about a ``BuildTrigger``. - This API is experimental. - Returns: Callable[[~.GetBuildTriggerRequest], Awaitable[~.BuildTrigger]]: @@ -631,8 +628,6 @@ def list_build_triggers( Lists existing ``BuildTrigger``\ s. - This API is experimental. 
- Returns: Callable[[~.ListBuildTriggersRequest], Awaitable[~.ListBuildTriggersResponse]]: @@ -659,8 +654,6 @@ def delete_build_trigger( Deletes a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - Returns: Callable[[~.DeleteBuildTriggerRequest], Awaitable[~.Empty]]: @@ -689,8 +682,6 @@ def update_build_trigger( Updates a ``BuildTrigger`` by its project ID and trigger ID. - This API is experimental. - Returns: Callable[[~.UpdateBuildTriggerRequest], Awaitable[~.BuildTrigger]]: @@ -912,6 +903,37 @@ def list_worker_pools( ) return self._stubs["list_worker_pools"] + @property + def get_default_service_account( + self, + ) -> Callable[ + [cloudbuild.GetDefaultServiceAccountRequest], + Awaitable[cloudbuild.DefaultServiceAccount], + ]: + r"""Return a callable for the get default service account method over gRPC. + + Returns the ``DefaultServiceAccount`` used by the project. + + Returns: + Callable[[~.GetDefaultServiceAccountRequest], + Awaitable[~.DefaultServiceAccount]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_default_service_account" not in self._stubs: + self._stubs[ + "get_default_service_account" + ] = self._logged_channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetDefaultServiceAccount", + request_serializer=cloudbuild.GetDefaultServiceAccountRequest.serialize, + response_deserializer=cloudbuild.DefaultServiceAccount.deserialize, + ) + return self._stubs["get_default_service_account"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1075,6 +1097,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_default_service_account: self._wrap_method( + self.get_default_service_account, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py index 35379cd3cc30..59980c8bf4e5 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py @@ -142,6 +142,14 @@ def post_get_build_trigger(self, response): logging.log(f"Received response: {response}") return response + def pre_get_default_service_account(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_default_service_account(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_worker_pool(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -606,6 +614,57 @@ def post_get_build_trigger_with_metadata( """ return response, metadata + def 
pre_get_default_service_account( + self, + request: cloudbuild.GetDefaultServiceAccountRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloudbuild.GetDefaultServiceAccountRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_default_service_account + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_get_default_service_account( + self, response: cloudbuild.DefaultServiceAccount + ) -> cloudbuild.DefaultServiceAccount: + """Post-rpc interceptor for get_default_service_account + + DEPRECATED. Please use the `post_get_default_service_account_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. This `post_get_default_service_account` interceptor runs + before the `post_get_default_service_account_with_metadata` interceptor. + """ + return response + + def post_get_default_service_account_with_metadata( + self, + response: cloudbuild.DefaultServiceAccount, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + cloudbuild.DefaultServiceAccount, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_default_service_account + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudBuild server but before it is returned to user code. + + We recommend only using this `post_get_default_service_account_with_metadata` + interceptor in new development instead of the `post_get_default_service_account` interceptor. + When both interceptors are used, this `post_get_default_service_account_with_metadata` interceptor runs after the + `post_get_default_service_account` interceptor. 
The (possibly modified) response returned by + `post_get_default_service_account` will be passed to + `post_get_default_service_account_with_metadata`. + """ + return response, metadata + def pre_get_worker_pool( self, request: cloudbuild.GetWorkerPoolRequest, @@ -2558,6 +2617,156 @@ def __call__( ) return resp + class _GetDefaultServiceAccount( + _BaseCloudBuildRestTransport._BaseGetDefaultServiceAccount, CloudBuildRestStub + ): + def __hash__(self): + return hash("CloudBuildRestTransport.GetDefaultServiceAccount") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: cloudbuild.GetDefaultServiceAccountRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloudbuild.DefaultServiceAccount: + r"""Call the get default service + account method over HTTP. + + Args: + request (~.cloudbuild.GetDefaultServiceAccountRequest): + The request object. Returns the default service account that will be used + for ``Builds``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.cloudbuild.DefaultServiceAccount: + The default service account used for ``Builds``. + """ + + http_options = ( + _BaseCloudBuildRestTransport._BaseGetDefaultServiceAccount._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_default_service_account( + request, metadata + ) + transcoded_request = _BaseCloudBuildRestTransport._BaseGetDefaultServiceAccount._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCloudBuildRestTransport._BaseGetDefaultServiceAccount._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.devtools.cloudbuild_v1.CloudBuildClient.GetDefaultServiceAccount", + extra={ + "serviceName": "google.devtools.cloudbuild.v1.CloudBuild", + "rpcName": "GetDefaultServiceAccount", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudBuildRestTransport._GetDefaultServiceAccount._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.DefaultServiceAccount() + pb_resp = cloudbuild.DefaultServiceAccount.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_default_service_account(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_default_service_account_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = cloudbuild.DefaultServiceAccount.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.devtools.cloudbuild_v1.CloudBuildClient.get_default_service_account", + extra={ + "serviceName": "google.devtools.cloudbuild.v1.CloudBuild", + "rpcName": "GetDefaultServiceAccount", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetWorkerPool( _BaseCloudBuildRestTransport._BaseGetWorkerPool, CloudBuildRestStub ): @@ -4006,6 +4215,16 @@ def get_build_trigger( # In C++ this would require a dynamic_cast return self._GetBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + @property + def get_default_service_account( + self, + ) -> Callable[ + [cloudbuild.GetDefaultServiceAccountRequest], cloudbuild.DefaultServiceAccount + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDefaultServiceAccount(self._session, self._host, self._interceptor) # type: ignore + @property def get_worker_pool( self, diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest_base.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest_base.py index 77483ea0b21b..c42417b0986e 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest_base.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest_base.py @@ -596,6 +596,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetDefaultServiceAccount: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/defaultServiceAccount}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloudbuild.GetDefaultServiceAccountRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCloudBuildRestTransport._BaseGetDefaultServiceAccount._get_unset_required_fields( + query_params + ) + ) + + 
query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetWorkerPool: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py index 85dc65297af7..9e045520032b 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/__init__.py @@ -27,10 +27,12 @@ BuildTrigger, BuiltImage, CancelBuildRequest, + ConnectedRepository, CreateBuildRequest, CreateBuildTriggerRequest, CreateWorkerPoolOperationMetadata, CreateWorkerPoolRequest, + DefaultServiceAccount, DeleteBuildTriggerRequest, DeleteWorkerPoolOperationMetadata, DeleteWorkerPoolRequest, @@ -38,6 +40,7 @@ FileHashes, GetBuildRequest, GetBuildTriggerRequest, + GetDefaultServiceAccountRequest, GetWorkerPoolRequest, GitConfig, GitFileSource, @@ -99,10 +102,12 @@ "BuildTrigger", "BuiltImage", "CancelBuildRequest", + "ConnectedRepository", "CreateBuildRequest", "CreateBuildTriggerRequest", "CreateWorkerPoolOperationMetadata", "CreateWorkerPoolRequest", + "DefaultServiceAccount", "DeleteBuildTriggerRequest", "DeleteWorkerPoolOperationMetadata", "DeleteWorkerPoolRequest", @@ -110,6 +115,7 @@ "FileHashes", "GetBuildRequest", "GetBuildTriggerRequest", + "GetDefaultServiceAccountRequest", "GetWorkerPoolRequest", "GitConfig", "GitFileSource", diff --git a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index 620ad73cd97e..de4a292e4a60 100644 --- a/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/packages/google-cloud-build/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -26,12 +26,15 @@ __protobuf__ = 
proto.module( package="google.devtools.cloudbuild.v1", manifest={ + "GetDefaultServiceAccountRequest", + "DefaultServiceAccount", "RetryBuildRequest", "RunBuildTriggerRequest", "StorageSource", "GitSource", "RepoSource", "StorageSourceManifest", + "ConnectedRepository", "Source", "BuiltImage", "UploadedPythonPackage", @@ -99,6 +102,56 @@ ) +class GetDefaultServiceAccountRequest(proto.Message): + r"""Returns the default service account that will be used for + ``Builds``. + + Attributes: + name (str): + Required. The name of the ``DefaultServiceAccount`` to + retrieve. Format: + ``projects/{project}/locations/{location}/defaultServiceAccount`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DefaultServiceAccount(proto.Message): + r"""The default service account used for ``Builds``. + + Attributes: + name (str): + Identifier. Format: + \`projects/{project}/locations/{location}/defaultServiceAccount + service_account_email (str): + Output only. The email address of the service account + identity that will be used for a build by default. + + This is returned in the format + ``projects/{project}/serviceAccounts/{service_account}`` + where ``{service_account}`` could be the legacy Cloud Build + SA, in the format + [PROJECT_NUMBER]@cloudbuild.gserviceaccount.com or the + Compute SA, in the format + [PROJECT_NUMBER]-compute@developer.gserviceaccount.com. + + If no service account will be used by default, this will be + empty. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=2, + ) + + class RetryBuildRequest(proto.Message): r"""Specifies a build to retry. @@ -170,17 +223,17 @@ class StorageSource(proto.Message): Cloud Storage bucket containing the source (see `Bucket Name Requirements `__). object_ (str): - Cloud Storage object containing the source. + Required. Cloud Storage object containing the source. 
This object must be a zipped (``.zip``) or gzipped archive file (``.tar.gz``) containing source to build. generation (int): - Cloud Storage generation for the object. If - the generation is omitted, the latest generation - will be used. + Optional. Cloud Storage generation for the + object. If the generation is omitted, the latest + generation will be used. source_fetcher (google.cloud.devtools.cloudbuild_v1.types.StorageSource.SourceFetcher): - Option to specify the tool to fetch the - source file for the build. + Optional. Option to specify the tool to fetch + the source file for the build. """ class SourceFetcher(proto.Enum): @@ -188,7 +241,7 @@ class SourceFetcher(proto.Enum): Values: SOURCE_FETCHER_UNSPECIFIED (0): - Unspecified. Defaults to GSUTIL. + Unspecified defaults to GSUTIL. GSUTIL (1): Use the "gsutil" tool to download the source file. @@ -224,20 +277,20 @@ class GitSource(proto.Message): Attributes: url (str): - Location of the Git repo to build. + Required. Location of the Git repo to build. This will be used as a ``git remote``, see https://git-scm.com/docs/git-remote. dir_ (str): - Directory, relative to the source root, in which to run the - build. + Optional. Directory, relative to the source root, in which + to run the build. This must be a relative path. If a step's ``dir`` is specified and is an absolute path, this value is ignored for that step's execution. revision (str): - The revision to fetch from the Git repository such as a - branch, a tag, a commit SHA, or any Git ref. + Optional. The revision to fetch from the Git repository such + as a branch, a tag, a commit SHA, or any Git ref. Cloud Build uses ``git fetch`` to fetch the revision from the Git repository; therefore make sure that the string you @@ -274,11 +327,12 @@ class RepoSource(proto.Message): Attributes: project_id (str): - ID of the project that owns the Cloud Source - Repository. If omitted, the project ID - requesting the build is assumed. + Optional. 
ID of the project that owns the + Cloud Source Repository. If omitted, the project + ID requesting the build is assumed. repo_name (str): - Name of the Cloud Source Repository. + Required. Name of the Cloud Source + Repository. branch_name (str): Regex matching branches to build. @@ -300,18 +354,19 @@ class RepoSource(proto.Message): This field is a member of `oneof`_ ``revision``. dir_ (str): - Directory, relative to the source root, in which to run the - build. + Optional. Directory, relative to the source root, in which + to run the build. This must be a relative path. If a step's ``dir`` is specified and is an absolute path, this value is ignored for that step's execution. invert_regex (bool): - Only trigger a build if the revision regex - does NOT match the revision regex. + Optional. Only trigger a build if the + revision regex does NOT match the revision + regex. substitutions (MutableMapping[str, str]): - Substitutions to use in a triggered build. - Should only be used with RunBuildTrigger + Optional. Substitutions to use in a triggered + build. Should only be used with RunBuildTrigger """ project_id: str = proto.Field( @@ -359,12 +414,12 @@ class StorageSourceManifest(proto.Message): Attributes: bucket (str): - Cloud Storage bucket containing the source manifest (see - `Bucket Name + Required. Cloud Storage bucket containing the source + manifest (see `Bucket Name Requirements `__). object_ (str): - Cloud Storage object containing the source - manifest. + Required. Cloud Storage object containing the + source manifest. This object must be a JSON file. generation (int): Cloud Storage generation for the object. If @@ -386,6 +441,38 @@ class StorageSourceManifest(proto.Message): ) +class ConnectedRepository(proto.Message): + r"""Location of the source in a 2nd-gen Google Cloud Build + repository resource. + + Attributes: + repository (str): + Required. 
Name of the Google Cloud Build repository, + formatted as + ``projects/*/locations/*/connections/*/repositories/*``. + dir_ (str): + Optional. Directory, relative to the source + root, in which to run the build. + revision (str): + Required. The revision to fetch from the Git + repository such as a branch, a tag, a commit + SHA, or any Git ref. + """ + + repository: str = proto.Field( + proto.STRING, + number=1, + ) + dir_: str = proto.Field( + proto.STRING, + number=2, + ) + revision: str = proto.Field( + proto.STRING, + number=3, + ) + + class Source(proto.Message): r"""Location of the source in a supported storage service. @@ -417,6 +504,12 @@ class Source(proto.Message): Storage. This feature is in Preview; see description `here `__. + This field is a member of `oneof`_ ``source``. + connected_repository (google.cloud.devtools.cloudbuild_v1.types.ConnectedRepository): + Optional. If provided, get the source from + this 2nd-gen Google Cloud Build repository + resource. + This field is a member of `oneof`_ ``source``. """ @@ -444,6 +537,12 @@ class Source(proto.Message): oneof="source", message="StorageSourceManifest", ) + connected_repository: "ConnectedRepository" = proto.Field( + proto.MESSAGE, + number=9, + oneof="source", + message="ConnectedRepository", + ) class BuiltImage(proto.Message): @@ -458,6 +557,9 @@ class BuiltImage(proto.Message): push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): Output only. Stores timing information for pushing the specified image. + artifact_registry_package (str): + Output only. Path to the artifact in Artifact + Registry. """ name: str = proto.Field( @@ -473,6 +575,10 @@ class BuiltImage(proto.Message): number=4, message="TimeSpan", ) + artifact_registry_package: str = proto.Field( + proto.STRING, + number=5, + ) class UploadedPythonPackage(proto.Message): @@ -486,6 +592,9 @@ class UploadedPythonPackage(proto.Message): push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): Output only. 
Stores timing information for pushing the specified artifact. + artifact_registry_package (str): + Output only. Path to the artifact in Artifact + Registry. """ uri: str = proto.Field( @@ -502,6 +611,10 @@ class UploadedPythonPackage(proto.Message): number=3, message="TimeSpan", ) + artifact_registry_package: str = proto.Field( + proto.STRING, + number=4, + ) class UploadedMavenArtifact(proto.Message): @@ -515,6 +628,9 @@ class UploadedMavenArtifact(proto.Message): push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): Output only. Stores timing information for pushing the specified artifact. + artifact_registry_package (str): + Output only. Path to the artifact in Artifact + Registry. """ uri: str = proto.Field( @@ -531,6 +647,10 @@ class UploadedMavenArtifact(proto.Message): number=3, message="TimeSpan", ) + artifact_registry_package: str = proto.Field( + proto.STRING, + number=4, + ) class UploadedGoModule(proto.Message): @@ -546,6 +666,9 @@ class UploadedGoModule(proto.Message): push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): Output only. Stores timing information for pushing the specified artifact. + artifact_registry_package (str): + Output only. Path to the artifact in Artifact + Registry. """ uri: str = proto.Field( @@ -562,6 +685,10 @@ class UploadedGoModule(proto.Message): number=3, message="TimeSpan", ) + artifact_registry_package: str = proto.Field( + proto.STRING, + number=4, + ) class UploadedNpmPackage(proto.Message): @@ -576,6 +703,9 @@ class UploadedNpmPackage(proto.Message): push_timing (google.cloud.devtools.cloudbuild_v1.types.TimeSpan): Output only. Stores timing information for pushing the specified artifact. + artifact_registry_package (str): + Output only. Path to the artifact in Artifact + Registry. 
""" uri: str = proto.Field( @@ -592,6 +722,10 @@ class UploadedNpmPackage(proto.Message): number=3, message="TimeSpan", ) + artifact_registry_package: str = proto.Field( + proto.STRING, + number=7, + ) class BuildStep(proto.Message): @@ -987,7 +1121,8 @@ class Build(proto.Message): Output only. Customer-readable message about the current status. source (google.cloud.devtools.cloudbuild_v1.types.Source): - The location of the source files to build. + Optional. The location of the source files to + build. steps (MutableSequence[google.cloud.devtools.cloudbuild_v1.types.BuildStep]): Required. The operations to be performed on the workspace. @@ -1464,9 +1599,7 @@ class GitSourceRepository(proto.Message): This field is a member of `oneof`_ ``repotype``. developer_connect (str): - The Developer Connect Git repository link or the url that - matches a repository link in the current project, formatted - as + The Developer Connect Git repository link formatted as ``projects/*/locations/*/connections/*/gitRepositoryLink/*`` This field is a member of `oneof`_ ``repotype``. @@ -1654,8 +1787,8 @@ class MavenArtifact(proto.Message): be uploaded to Artifact Registry with this location as a prefix. path (str): - Path to an artifact in the build's workspace - to be uploaded to Artifact Registry. + Optional. Path to an artifact in the build's + workspace to be uploaded to Artifact Registry. This can be either an absolute path, e.g. /workspace/my-app/target/my-app-1.0.SNAPSHOT.jar @@ -1796,8 +1929,11 @@ class NpmPackage(proto.Message): will be zipped and uploaded to Artifact Registry with this location as a prefix. package_path (str): - Path to the package.json. - e.g. workspace/path/to/package + Optional. Path to the package.json. e.g. + workspace/path/to/package + + Only one of ``archive`` or ``package_path`` can be + specified. 
""" repository: str = proto.Field( @@ -1892,6 +2028,13 @@ class SourceProvenance(proto.Message): A copy of the build's ``source.storage_source_manifest``, if exists, with any revisions resolved. This feature is in Preview. + resolved_connected_repository (google.cloud.devtools.cloudbuild_v1.types.ConnectedRepository): + Output only. A copy of the build's + ``source.connected_repository``, if exists, with any + revisions resolved. + resolved_git_source (google.cloud.devtools.cloudbuild_v1.types.GitSource): + Output only. A copy of the build's ``source.git_source``, if + exists, with any revisions resolved. file_hashes (MutableMapping[str, google.cloud.devtools.cloudbuild_v1.types.FileHashes]): Output only. Hash(es) of the build source, which can be used to verify that the original source integrity was maintained @@ -1922,6 +2065,16 @@ class SourceProvenance(proto.Message): number=9, message="StorageSourceManifest", ) + resolved_connected_repository: "ConnectedRepository" = proto.Field( + proto.MESSAGE, + number=10, + message="ConnectedRepository", + ) + resolved_git_source: "GitSource" = proto.Field( + proto.MESSAGE, + number=11, + message="GitSource", + ) file_hashes: MutableMapping[str, "FileHashes"] = proto.MapField( proto.STRING, proto.MESSAGE, @@ -2964,8 +3117,8 @@ class PubsubConfig(proto.Message): Output only. Name of the subscription. Format is ``projects/{project}/subscriptions/{subscription}``. topic (str): - The name of the topic from which this subscription is - receiving messages. Format is + Optional. The name of the topic from which this subscription + is receiving messages. Format is ``projects/{project}/topics/{topic}``. service_account_email (str): Service account that will make the push @@ -3766,8 +3919,8 @@ class GitHubEnterpriseConfig(proto.Message): Attributes: name (str): - Optional. 
The full resource name for the - GitHubEnterpriseConfig For example: + The full resource name for the GitHubEnterpriseConfig For + example: "projects/{$project_id}/locations/{$location_id}/githubEnterpriseConfigs/{$config_id}". host_url (str): The URL of the github enterprise host the @@ -3794,9 +3947,9 @@ class GitHubEnterpriseConfig(proto.Message): {project} is a project number or id and {network} is the name of a VPC network in the project. secrets (google.cloud.devtools.cloudbuild_v1.types.GitHubEnterpriseSecrets): - Names of secrets in Secret Manager. + Optional. Names of secrets in Secret Manager. display_name (str): - Name to display for this config. + Optional. Name to display for this config. ssl_ca (str): Optional. SSL certificate to use for requests to GitHub Enterprise. @@ -3929,7 +4082,7 @@ class WorkerPool(proto.Message): state (google.cloud.devtools.cloudbuild_v1.types.WorkerPool.State): Output only. ``WorkerPool`` state. private_pool_v1_config (google.cloud.devtools.cloudbuild_v1.types.PrivatePoolV1Config): - Legacy Private Pool configuration. + Private Pool configuration. This field is a member of `oneof`_ ``config``. etag (str): @@ -4299,7 +4452,8 @@ class UpdateWorkerPoolRequest(proto.Message): update. Format: ``projects/{project}/locations/{location}/workerPools/{workerPool}``. update_mask (google.protobuf.field_mask_pb2.FieldMask): - A mask specifying which fields in ``worker_pool`` to update. + Optional. A mask specifying which fields in ``worker_pool`` + to update. validate_only (bool): If set, validate the request and preview the response, but do not actually post it. 
diff --git a/packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_async.py b/packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_async.py new file mode 100644 index 000000000000..6e576a3c65e1 --- /dev/null +++ b/packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDefaultServiceAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetDefaultServiceAccount_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +async def sample_get_default_service_account(): + # Create a client + client = cloudbuild_v1.CloudBuildAsyncClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetDefaultServiceAccountRequest( + name="name_value", + ) + + # Make the request + response = await client.get_default_service_account(request=request) + + # Handle the response + print(response) + + +# [END cloudbuild_v1_generated_CloudBuild_GetDefaultServiceAccount_async] diff --git a/packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_sync.py b/packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_sync.py new file mode 100644 index 000000000000..57306004ad4f --- /dev/null +++ b/packages/google-cloud-build/samples/generated_samples/cloudbuild_v1_generated_cloud_build_get_default_service_account_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDefaultServiceAccount +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-build + + +# [START cloudbuild_v1_generated_CloudBuild_GetDefaultServiceAccount_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.devtools import cloudbuild_v1 + + +def sample_get_default_service_account(): + # Create a client + client = cloudbuild_v1.CloudBuildClient() + + # Initialize request argument(s) + request = cloudbuild_v1.GetDefaultServiceAccountRequest( + name="name_value", + ) + + # Make the request + response = client.get_default_service_account(request=request) + + # Handle the response + print(response) + + +# [END cloudbuild_v1_generated_CloudBuild_GetDefaultServiceAccount_sync] diff --git a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index 01cbcbae464e..943452592e06 100644 --- a/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/packages/google-cloud-build/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -210,6 +210,10 @@ "name": "id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -294,6 +298,10 @@ "name": "id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -379,6 +387,10 @@ "name": "trigger", "type": 
"google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" }, + { + "name": "parent", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -463,6 +475,10 @@ "name": "trigger", "type": "google.cloud.devtools.cloudbuild_v1.types.BuildTrigger" }, + { + "name": "parent", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -548,6 +564,10 @@ "name": "build", "type": "google.cloud.devtools.cloudbuild_v1.types.Build" }, + { + "name": "parent", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -632,6 +652,10 @@ "name": "build", "type": "google.cloud.devtools.cloudbuild_v1.types.Build" }, + { + "name": "parent", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -894,6 +918,10 @@ "name": "trigger_id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -975,6 +1003,10 @@ "name": "trigger_id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1218,6 +1250,10 @@ "name": "trigger_id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1302,6 +1338,10 @@ "name": "trigger_id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1387,6 +1427,10 @@ "name": "id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1471,6 +1515,10 @@ "name": "id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1526,6 +1574,167 @@ ], "title": "cloudbuild_v1_generated_cloud_build_get_build_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient", + "shortName": 
"CloudBuildAsyncClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildAsyncClient.get_default_service_account", + "method": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild.GetDefaultServiceAccount", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetDefaultServiceAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetDefaultServiceAccountRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.DefaultServiceAccount", + "shortName": "get_default_service_account" + }, + "description": "Sample for GetDefaultServiceAccount", + "file": "cloudbuild_v1_generated_cloud_build_get_default_service_account_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetDefaultServiceAccount_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_default_service_account_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient", + "shortName": "CloudBuildClient" + }, + "fullName": "google.cloud.devtools.cloudbuild_v1.CloudBuildClient.get_default_service_account", + "method": { + "fullName": 
"google.devtools.cloudbuild.v1.CloudBuild.GetDefaultServiceAccount", + "service": { + "fullName": "google.devtools.cloudbuild.v1.CloudBuild", + "shortName": "CloudBuild" + }, + "shortName": "GetDefaultServiceAccount" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.devtools.cloudbuild_v1.types.GetDefaultServiceAccountRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.devtools.cloudbuild_v1.types.DefaultServiceAccount", + "shortName": "get_default_service_account" + }, + "description": "Sample for GetDefaultServiceAccount", + "file": "cloudbuild_v1_generated_cloud_build_get_default_service_account_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudbuild_v1_generated_CloudBuild_GetDefaultServiceAccount_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudbuild_v1_generated_cloud_build_get_default_service_account_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2361,6 +2570,10 @@ "name": "id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2445,6 +2658,10 @@ "name": "id", "type": "str" }, + { + "name": "name", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" diff --git a/packages/google-cloud-build/scripts/fixup_cloudbuild_v1_keywords.py 
b/packages/google-cloud-build/scripts/fixup_cloudbuild_v1_keywords.py index 4c5a62aa55e0..2092abd33c90 100644 --- a/packages/google-cloud-build/scripts/fixup_cloudbuild_v1_keywords.py +++ b/packages/google-cloud-build/scripts/fixup_cloudbuild_v1_keywords.py @@ -48,6 +48,7 @@ class cloudbuildCallTransformer(cst.CSTTransformer): 'delete_worker_pool': ('name', 'etag', 'allow_missing', 'validate_only', ), 'get_build': ('project_id', 'id', 'name', ), 'get_build_trigger': ('project_id', 'trigger_id', 'name', ), + 'get_default_service_account': ('name', ), 'get_worker_pool': ('name', ), 'list_builds': ('project_id', 'parent', 'page_size', 'page_token', 'filter', ), 'list_build_triggers': ('project_id', 'parent', 'page_size', 'page_token', ), diff --git a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index efbf28e3c4bc..b0ef7ee042b8 100644 --- a/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/packages/google-cloud-build/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -1285,6 +1285,7 @@ def test_create_build_flattened(): client.create_build( project_id="project_id_value", build=cloudbuild.Build(name="name_value"), + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1297,6 +1298,9 @@ def test_create_build_flattened(): arg = args[0].build mock_val = cloudbuild.Build(name="name_value") assert arg == mock_val + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_create_build_flattened_error(): @@ -1311,6 +1315,7 @@ def test_create_build_flattened_error(): cloudbuild.CreateBuildRequest(), project_id="project_id_value", build=cloudbuild.Build(name="name_value"), + parent="parent_value", ) @@ -1333,6 +1338,7 @@ async def test_create_build_flattened_async(): response = await client.create_build( project_id="project_id_value", 
build=cloudbuild.Build(name="name_value"), + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -1345,6 +1351,9 @@ async def test_create_build_flattened_async(): arg = args[0].build mock_val = cloudbuild.Build(name="name_value") assert arg == mock_val + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1360,6 +1369,7 @@ async def test_create_build_flattened_error_async(): cloudbuild.CreateBuildRequest(), project_id="project_id_value", build=cloudbuild.Build(name="name_value"), + parent="parent_value", ) @@ -1599,6 +1609,7 @@ def test_get_build_flattened(): client.get_build( project_id="project_id_value", id="id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1611,6 +1622,9 @@ def test_get_build_flattened(): arg = args[0].id mock_val = "id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_build_flattened_error(): @@ -1625,6 +1639,7 @@ def test_get_build_flattened_error(): cloudbuild.GetBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -1645,6 +1660,7 @@ async def test_get_build_flattened_async(): response = await client.get_build( project_id="project_id_value", id="id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -1657,6 +1673,9 @@ async def test_get_build_flattened_async(): arg = args[0].id mock_val = "id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1672,6 +1691,7 @@ async def test_get_build_flattened_error_async(): cloudbuild.GetBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -2382,6 +2402,7 @@ def test_cancel_build_flattened(): client.cancel_build( project_id="project_id_value", id="id_value", + name="name_value", ) # Establish that the underlying call 
was made with the expected @@ -2394,6 +2415,9 @@ def test_cancel_build_flattened(): arg = args[0].id mock_val = "id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_cancel_build_flattened_error(): @@ -2408,6 +2432,7 @@ def test_cancel_build_flattened_error(): cloudbuild.CancelBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -2428,6 +2453,7 @@ async def test_cancel_build_flattened_async(): response = await client.cancel_build( project_id="project_id_value", id="id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2440,6 +2466,9 @@ async def test_cancel_build_flattened_async(): arg = args[0].id mock_val = "id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2455,6 +2484,7 @@ async def test_cancel_build_flattened_error_async(): cloudbuild.CancelBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -2660,6 +2690,7 @@ def test_retry_build_flattened(): client.retry_build( project_id="project_id_value", id="id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2672,6 +2703,9 @@ def test_retry_build_flattened(): arg = args[0].id mock_val = "id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_retry_build_flattened_error(): @@ -2686,6 +2720,7 @@ def test_retry_build_flattened_error(): cloudbuild.RetryBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -2708,6 +2743,7 @@ async def test_retry_build_flattened_async(): response = await client.retry_build( project_id="project_id_value", id="id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -2720,6 +2756,9 @@ async def test_retry_build_flattened_async(): arg = args[0].id mock_val 
= "id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2735,6 +2774,7 @@ async def test_retry_build_flattened_error_async(): cloudbuild.RetryBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -3267,6 +3307,7 @@ def test_create_build_trigger_flattened(): client.create_build_trigger( project_id="project_id_value", trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3279,6 +3320,9 @@ def test_create_build_trigger_flattened(): arg = args[0].trigger mock_val = cloudbuild.BuildTrigger(resource_name="resource_name_value") assert arg == mock_val + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_create_build_trigger_flattened_error(): @@ -3293,6 +3337,7 @@ def test_create_build_trigger_flattened_error(): cloudbuild.CreateBuildTriggerRequest(), project_id="project_id_value", trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + parent="parent_value", ) @@ -3317,6 +3362,7 @@ async def test_create_build_trigger_flattened_async(): response = await client.create_build_trigger( project_id="project_id_value", trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + parent="parent_value", ) # Establish that the underlying call was made with the expected @@ -3329,6 +3375,9 @@ async def test_create_build_trigger_flattened_async(): arg = args[0].trigger mock_val = cloudbuild.BuildTrigger(resource_name="resource_name_value") assert arg == mock_val + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3344,6 +3393,7 @@ async def test_create_build_trigger_flattened_error_async(): cloudbuild.CreateBuildTriggerRequest(), project_id="project_id_value", trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + parent="parent_value", ) @@ 
-3592,6 +3642,7 @@ def test_get_build_trigger_flattened(): client.get_build_trigger( project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3604,6 +3655,9 @@ def test_get_build_trigger_flattened(): arg = args[0].trigger_id mock_val = "trigger_id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_build_trigger_flattened_error(): @@ -3618,6 +3672,7 @@ def test_get_build_trigger_flattened_error(): cloudbuild.GetBuildTriggerRequest(), project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) @@ -3642,6 +3697,7 @@ async def test_get_build_trigger_flattened_async(): response = await client.get_build_trigger( project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -3654,6 +3710,9 @@ async def test_get_build_trigger_flattened_async(): arg = args[0].trigger_id mock_val = "trigger_id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3669,6 +3728,7 @@ async def test_get_build_trigger_flattened_error_async(): cloudbuild.GetBuildTriggerRequest(), project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) @@ -4353,6 +4413,7 @@ def test_delete_build_trigger_flattened(): client.delete_build_trigger( project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4365,6 +4426,9 @@ def test_delete_build_trigger_flattened(): arg = args[0].trigger_id mock_val = "trigger_id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_build_trigger_flattened_error(): @@ -4379,6 +4443,7 @@ def test_delete_build_trigger_flattened_error(): 
cloudbuild.DeleteBuildTriggerRequest(), project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) @@ -4401,6 +4466,7 @@ async def test_delete_build_trigger_flattened_async(): response = await client.delete_build_trigger( project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) # Establish that the underlying call was made with the expected @@ -4413,6 +4479,9 @@ async def test_delete_build_trigger_flattened_async(): arg = args[0].trigger_id mock_val = "trigger_id_value" assert arg == mock_val + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -4428,6 +4497,7 @@ async def test_delete_build_trigger_flattened_error_async(): cloudbuild.DeleteBuildTriggerRequest(), project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) @@ -6950,13 +7020,83 @@ async def test_list_worker_pools_async_pages(): assert page_.raw_page.next_page_token == token -def test_create_build_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.GetDefaultServiceAccountRequest, + dict, + ], +) +def test_get_default_service_account(request_type, transport: str = "grpc"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.DefaultServiceAccount( + name="name_value", + service_account_email="service_account_email_value", + ) + response = client.get_default_service_account(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloudbuild.GetDefaultServiceAccountRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.DefaultServiceAccount) + assert response.name == "name_value" + assert response.service_account_email == "service_account_email_value" + + +def test_get_default_service_account_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloudbuild.GetDefaultServiceAccountRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_default_service_account(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloudbuild.GetDefaultServiceAccountRequest( + name="name_value", + ) + + +def test_get_default_service_account_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -6964,108 +7104,320 @@ def test_create_build_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_build in client._transport._wrapped_methods + assert ( + client._transport.get_default_service_account + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_build] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.get_default_service_account + ] = mock_rpc request = {} - client.create_build(request) + client.get_default_service_account(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_build(request) + client.get_default_service_account(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_build_rest_required_fields(request_type=cloudbuild.CreateBuildRequest): - transport_class = transports.CloudBuildRestTransport +@pytest.mark.asyncio +async def test_get_default_service_account_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudBuildAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["project_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_default_service_account + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_build._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_default_service_account + ] = mock_rpc - # verify required fields with default values are now 
present + request = {} + await client.get_default_service_account(request) - jsonified_request["projectId"] = "project_id_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_build._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("parent",)) - jsonified_request.update(unset_fields) + await client.get_default_service_account(request) - # verify required fields with non-default values are left alone - assert "projectId" in jsonified_request - assert jsonified_request["projectId"] == "project_id_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_get_default_service_account_async( + transport: str = "grpc_asyncio", + request_type=cloudbuild.GetDefaultServiceAccountRequest, +): + client = CloudBuildAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.DefaultServiceAccount( + name="name_value", + service_account_email="service_account_email_value", + ) + ) + response = await client.get_default_service_account(request) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloudbuild.GetDefaultServiceAccountRequest() + assert args[0] == request - response = client.create_build(request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.DefaultServiceAccount) + assert response.name == "name_value" + assert response.service_account_email == "service_account_email_value" - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_get_default_service_account_async_from_dict(): + await test_get_default_service_account_async(request_type=dict) -def test_create_build_rest_unset_required_fields(): - transport = transports.CloudBuildRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_get_default_service_account_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.DefaultServiceAccount() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_default_service_account( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_default_service_account_flattened_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_default_service_account( + cloudbuild.GetDefaultServiceAccountRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_default_service_account_flattened_async(): + client = CloudBuildAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = cloudbuild.DefaultServiceAccount() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.DefaultServiceAccount() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_default_service_account( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_default_service_account_flattened_error_async(): + client = CloudBuildAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_default_service_account( + cloudbuild.GetDefaultServiceAccountRequest(), + name="name_value", + ) + + +def test_create_build_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_build in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_build] = mock_rpc + + request = {} + client.create_build(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_build(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_build_rest_required_fields(request_type=cloudbuild.CreateBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_build._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_build(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials ) unset_fields = transport.create_build._get_unset_required_fields({}) @@ -7098,6 +7450,7 @@ def test_create_build_rest_flattened(): mock_args = dict( project_id="project_id_value", build=cloudbuild.Build(name="name_value"), + parent="parent_value", ) mock_args.update(sample_request) @@ -7133,6 +7486,7 @@ def test_create_build_rest_flattened_error(transport: str = "rest"): cloudbuild.CreateBuildRequest(), project_id="project_id_value", build=cloudbuild.Build(name="name_value"), + parent="parent_value", ) @@ -7286,6 +7640,7 @@ def test_get_build_rest_flattened(): mock_args = dict( project_id="project_id_value", id="id_value", + name="name_value", ) mock_args.update(sample_request) @@ -7323,6 
+7678,7 @@ def test_get_build_rest_flattened_error(transport: str = "rest"): cloudbuild.GetBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -7731,6 +8087,7 @@ def test_cancel_build_rest_flattened(): mock_args = dict( project_id="project_id_value", id="id_value", + name="name_value", ) mock_args.update(sample_request) @@ -7769,6 +8126,7 @@ def test_cancel_build_rest_flattened_error(transport: str = "rest"): cloudbuild.CancelBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -7922,6 +8280,7 @@ def test_retry_build_rest_flattened(): mock_args = dict( project_id="project_id_value", id="id_value", + name="name_value", ) mock_args.update(sample_request) @@ -7958,6 +8317,7 @@ def test_retry_build_rest_flattened_error(transport: str = "rest"): cloudbuild.RetryBuildRequest(), project_id="project_id_value", id="id_value", + name="name_value", ) @@ -8296,6 +8656,7 @@ def test_create_build_trigger_rest_flattened(): mock_args = dict( project_id="project_id_value", trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + parent="parent_value", ) mock_args.update(sample_request) @@ -8333,6 +8694,7 @@ def test_create_build_trigger_rest_flattened_error(transport: str = "rest"): cloudbuild.CreateBuildTriggerRequest(), project_id="project_id_value", trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + parent="parent_value", ) @@ -8490,6 +8852,7 @@ def test_get_build_trigger_rest_flattened(): mock_args = dict( project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -8529,6 +8892,7 @@ def test_get_build_trigger_rest_flattened_error(transport: str = "rest"): cloudbuild.GetBuildTriggerRequest(), project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) @@ -8945,6 +9309,7 @@ def test_delete_build_trigger_rest_flattened(): mock_args = dict( project_id="project_id_value", 
trigger_id="trigger_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -8982,6 +9347,7 @@ def test_delete_build_trigger_rest_flattened_error(transport: str = "rest"): cloudbuild.DeleteBuildTriggerRequest(), project_id="project_id_value", trigger_id="trigger_id_value", + name="name_value", ) @@ -10482,6 +10848,191 @@ def test_list_worker_pools_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token +def test_get_default_service_account_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_default_service_account + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_default_service_account + ] = mock_rpc + + request = {} + client.get_default_service_account(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_default_service_account(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_default_service_account_rest_required_fields( + request_type=cloudbuild.GetDefaultServiceAccountRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_default_service_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_default_service_account._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.DefaultServiceAccount() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudbuild.DefaultServiceAccount.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_default_service_account(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_default_service_account_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_default_service_account._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_default_service_account_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.DefaultServiceAccount() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/defaultServiceAccount" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloudbuild.DefaultServiceAccount.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_default_service_account(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/defaultServiceAccount}" + % client.transport._host, + args[1], + ) + + +def test_get_default_service_account_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_default_service_account( + cloudbuild.GetDefaultServiceAccountRequest(), + name="name_value", + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudBuildGrpcTransport( @@ -10988,6 +11539,29 @@ def test_list_worker_pools_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_default_service_account_empty_call_grpc(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + call.return_value = cloudbuild.DefaultServiceAccount() + client.get_default_service_account(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudbuild.GetDefaultServiceAccountRequest() + + assert args[0] == request_msg + + def test_create_build_routing_parameters_request_1_grpc(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11496,6 +12070,36 @@ def test_list_worker_pools_routing_parameters_request_1_grpc(): ) +def test_get_default_service_account_routing_parameters_request_1_grpc(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + call.return_value = cloudbuild.DefaultServiceAccount() + client.get_default_service_account( + request={"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + ) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = cloudbuild.GetDefaultServiceAccountRequest( + **{"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + ) + + assert args[0] == request_msg + + expected_headers = {"location": "sample2"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + def test_transport_kind_grpc_asyncio(): transport = CloudBuildAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -12049,6 +12653,36 @@ async def test_list_worker_pools_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_default_service_account_empty_call_grpc_asyncio(): + client = CloudBuildAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.DefaultServiceAccount( + name="name_value", + service_account_email="service_account_email_value", + ) + ) + await client.get_default_service_account(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudbuild.GetDefaultServiceAccountRequest() + + assert args[0] == request_msg + + @pytest.mark.asyncio async def test_create_build_routing_parameters_request_1_grpc_asyncio(): client = CloudBuildAsyncClient( @@ -12696,6 +13330,43 @@ async def test_list_worker_pools_routing_parameters_request_1_grpc_asyncio(): ) +@pytest.mark.asyncio +async def test_get_default_service_account_routing_parameters_request_1_grpc_asyncio(): + client = CloudBuildAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + cloudbuild.DefaultServiceAccount( + name="name_value", + service_account_email="service_account_email_value", + ) + ) + await client.get_default_service_account( + request={"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + ) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = cloudbuild.GetDefaultServiceAccountRequest( + **{"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + ) + + assert args[0] == request_msg + + expected_headers = {"location": "sample2"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + def test_transport_kind_rest(): transport = CloudBuildClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -12773,6 +13444,11 @@ def test_create_build_rest_call_success(request_type): "object_": "object__value", "generation": 1068, }, + "connected_repository": { + "repository": "repository_value", + "dir_": "dir__value", + "revision": "revision_value", + }, }, "steps": [ { @@ -12801,7 +13477,12 @@ def test_create_build_rest_call_success(request_type): ], "results": { "images": [ - {"name": "name_value", "digest": "digest_value", "push_timing": {}} + { + "name": "name_value", + "digest": "digest_value", + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], "build_step_images": [ "build_step_images_value1", @@ -12821,14 +13502,32 @@ def test_create_build_rest_call_success(request_type): "file_hash": [{"type_": 1, "value": b"value_blob"}] }, "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", } ], "maven_artifacts": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } + ], + "go_modules": [ + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], - "go_modules": [{"uri": "uri_value", "file_hashes": {}, "push_timing": {}}], "npm_packages": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, 
+ "artifact_registry_package": "artifact_registry_package_value", + } ], }, "create_time": {}, @@ -12878,6 +13577,8 @@ def test_create_build_rest_call_success(request_type): "resolved_storage_source": {}, "resolved_repo_source": {}, "resolved_storage_source_manifest": {}, + "resolved_connected_repository": {}, + "resolved_git_source": {}, "file_hashes": {}, }, "build_trigger_id": "build_trigger_id_value", @@ -13843,6 +14544,11 @@ def test_create_build_trigger_rest_call_success(request_type): "object_": "object__value", "generation": 1068, }, + "connected_repository": { + "repository": "repository_value", + "dir_": "dir__value", + "revision": "revision_value", + }, }, "steps": [ { @@ -13871,7 +14577,12 @@ def test_create_build_trigger_rest_call_success(request_type): ], "results": { "images": [ - {"name": "name_value", "digest": "digest_value", "push_timing": {}} + { + "name": "name_value", + "digest": "digest_value", + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], "build_step_images": [ "build_step_images_value1", @@ -13891,16 +14602,32 @@ def test_create_build_trigger_rest_call_success(request_type): "file_hash": [{"type_": 1, "value": b"value_blob"}] }, "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", } ], "maven_artifacts": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], "go_modules": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], "npm_packages": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], }, "create_time": {}, @@ 
-13953,6 +14680,8 @@ def test_create_build_trigger_rest_call_success(request_type): "resolved_storage_source": {}, "resolved_repo_source": {}, "resolved_storage_source_manifest": {}, + "resolved_connected_repository": {}, + "resolved_git_source": {}, "file_hashes": {}, }, "build_trigger_id": "build_trigger_id_value", @@ -14711,6 +15440,11 @@ def test_update_build_trigger_rest_call_success(request_type): "object_": "object__value", "generation": 1068, }, + "connected_repository": { + "repository": "repository_value", + "dir_": "dir__value", + "revision": "revision_value", + }, }, "steps": [ { @@ -14739,7 +15473,12 @@ def test_update_build_trigger_rest_call_success(request_type): ], "results": { "images": [ - {"name": "name_value", "digest": "digest_value", "push_timing": {}} + { + "name": "name_value", + "digest": "digest_value", + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], "build_step_images": [ "build_step_images_value1", @@ -14759,16 +15498,32 @@ def test_update_build_trigger_rest_call_success(request_type): "file_hash": [{"type_": 1, "value": b"value_blob"}] }, "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", } ], "maven_artifacts": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], "go_modules": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], "npm_packages": [ - {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + { + "uri": "uri_value", + "file_hashes": {}, + "push_timing": {}, + "artifact_registry_package": "artifact_registry_package_value", + } ], }, "create_time": {}, @@ -14821,6 +15576,8 @@ def test_update_build_trigger_rest_call_success(request_type): 
"resolved_storage_source": {}, "resolved_repo_source": {}, "resolved_storage_source_manifest": {}, + "resolved_connected_repository": {}, + "resolved_git_source": {}, "file_hashes": {}, }, "build_trigger_id": "build_trigger_id_value", @@ -16326,6 +17083,138 @@ def test_list_worker_pools_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_get_default_service_account_rest_bad_request( + request_type=cloudbuild.GetDefaultServiceAccountRequest, +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_default_service_account(request) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.GetDefaultServiceAccountRequest, + dict, + ], +) +def test_get_default_service_account_rest_call_success(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.DefaultServiceAccount( + name="name_value", + service_account_email="service_account_email_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloudbuild.DefaultServiceAccount.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_default_service_account(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.DefaultServiceAccount) + assert response.name == "name_value" + assert response.service_account_email == "service_account_email_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_default_service_account_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_get_default_service_account" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, + "post_get_default_service_account_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_get_default_service_account" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
post_with_metadata.assert_not_called() + pb_message = cloudbuild.GetDefaultServiceAccountRequest.pb( + cloudbuild.GetDefaultServiceAccountRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloudbuild.DefaultServiceAccount.to_json( + cloudbuild.DefaultServiceAccount() + ) + req.return_value.content = return_value + + request = cloudbuild.GetDefaultServiceAccountRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.DefaultServiceAccount() + post_with_metadata.return_value = cloudbuild.DefaultServiceAccount(), metadata + + client.get_default_service_account( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -16715,6 +17604,28 @@ def test_list_worker_pools_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_default_service_account_empty_call_rest(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + client.get_default_service_account(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloudbuild.GetDefaultServiceAccountRequest() + + assert args[0] == request_msg + + def test_create_build_routing_parameters_request_1_rest(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17206,6 +18117,35 @@ def test_list_worker_pools_routing_parameters_request_1_rest(): ) +def test_get_default_service_account_routing_parameters_request_1_rest(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_default_service_account), "__call__" + ) as call: + client.get_default_service_account( + request={"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + ) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, kw = call.mock_calls[0] + request_msg = cloudbuild.GetDefaultServiceAccountRequest( + **{"name": "projects/sample1/locations/sample2/defaultServiceAccount"} + ) + + assert args[0] == request_msg + + expected_headers = {"location": "sample2"} + assert ( + gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw["metadata"] + ) + + def test_cloud_build_rest_lro_client(): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17274,6 +18214,7 @@ def test_cloud_build_base_transport(): "delete_worker_pool", "update_worker_pool", "list_worker_pools", + "get_default_service_account", ) for method in methods: with pytest.raises(NotImplementedError): @@ -17588,6 +18529,9 @@ def test_cloud_build_client_transport_session_collision(transport_name): session1 = client1.transport.list_worker_pools._session session2 = client2.transport.list_worker_pools._session assert session1 != session2 + session1 = client1.transport.get_default_service_account._session + session2 = client2.transport.get_default_service_account._session + 
assert session1 != session2 def test_cloud_build_grpc_transport_channel(): @@ -17817,9 +18761,32 @@ def test_parse_crypto_key_path(): assert expected == actual -def test_github_enterprise_config_path(): +def test_default_service_account_path(): project = "oyster" - config = "nudibranch" + location = "nudibranch" + expected = "projects/{project}/locations/{location}/defaultServiceAccount".format( + project=project, + location=location, + ) + actual = CloudBuildClient.default_service_account_path(project, location) + assert expected == actual + + +def test_parse_default_service_account_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = CloudBuildClient.default_service_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_default_service_account_path(path) + assert expected == actual + + +def test_github_enterprise_config_path(): + project = "winkle" + config = "nautilus" expected = "projects/{project}/githubEnterpriseConfigs/{config}".format( project=project, config=config, @@ -17830,8 +18797,8 @@ def test_github_enterprise_config_path(): def test_parse_github_enterprise_config_path(): expected = { - "project": "cuttlefish", - "config": "mussel", + "project": "scallop", + "config": "abalone", } path = CloudBuildClient.github_enterprise_config_path(**expected) @@ -17841,10 +18808,10 @@ def test_parse_github_enterprise_config_path(): def test_git_repository_link_path(): - project = "winkle" - location = "nautilus" - connection = "scallop" - git_repository_link = "abalone" + project = "squid" + location = "clam" + connection = "whelk" + git_repository_link = "octopus" expected = "projects/{project}/locations/{location}/connections/{connection}/gitRepositoryLinks/{git_repository_link}".format( project=project, location=location, @@ -17859,10 +18826,10 @@ def test_git_repository_link_path(): def test_parse_git_repository_link_path(): expected = { - "project": "squid", - 
"location": "clam", - "connection": "whelk", - "git_repository_link": "octopus", + "project": "oyster", + "location": "nudibranch", + "connection": "cuttlefish", + "git_repository_link": "mussel", } path = CloudBuildClient.git_repository_link_path(**expected) @@ -17872,8 +18839,8 @@ def test_parse_git_repository_link_path(): def test_network_path(): - project = "oyster" - network = "nudibranch" + project = "winkle" + network = "nautilus" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -17884,8 +18851,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "cuttlefish", - "network": "mussel", + "project": "scallop", + "network": "abalone", } path = CloudBuildClient.network_path(**expected) @@ -17895,9 +18862,9 @@ def test_parse_network_path(): def test_network_attachment_path(): - project = "winkle" - region = "nautilus" - networkattachment = "scallop" + project = "squid" + region = "clam" + networkattachment = "whelk" expected = "projects/{project}/regions/{region}/networkAttachments/{networkattachment}".format( project=project, region=region, @@ -17911,9 +18878,9 @@ def test_network_attachment_path(): def test_parse_network_attachment_path(): expected = { - "project": "abalone", - "region": "squid", - "networkattachment": "clam", + "project": "octopus", + "region": "oyster", + "networkattachment": "nudibranch", } path = CloudBuildClient.network_attachment_path(**expected) @@ -17923,10 +18890,10 @@ def test_parse_network_attachment_path(): def test_repository_path(): - project = "whelk" - location = "octopus" - connection = "oyster" - repository = "nudibranch" + project = "cuttlefish" + location = "mussel" + connection = "winkle" + repository = "nautilus" expected = "projects/{project}/locations/{location}/connections/{connection}/repositories/{repository}".format( project=project, location=location, @@ -17939,10 +18906,10 @@ def test_repository_path(): def 
test_parse_repository_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "connection": "winkle", - "repository": "nautilus", + "project": "scallop", + "location": "abalone", + "connection": "squid", + "repository": "clam", } path = CloudBuildClient.repository_path(**expected) @@ -17952,9 +18919,9 @@ def test_parse_repository_path(): def test_secret_version_path(): - project = "scallop" - secret = "abalone" - version = "squid" + project = "whelk" + secret = "octopus" + version = "oyster" expected = "projects/{project}/secrets/{secret}/versions/{version}".format( project=project, secret=secret, @@ -17966,9 +18933,9 @@ def test_secret_version_path(): def test_parse_secret_version_path(): expected = { - "project": "clam", - "secret": "whelk", - "version": "octopus", + "project": "nudibranch", + "secret": "cuttlefish", + "version": "mussel", } path = CloudBuildClient.secret_version_path(**expected) @@ -17978,8 +18945,8 @@ def test_parse_secret_version_path(): def test_service_account_path(): - project = "oyster" - service_account = "nudibranch" + project = "winkle" + service_account = "nautilus" expected = "projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -17990,8 +18957,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "cuttlefish", - "service_account": "mussel", + "project": "scallop", + "service_account": "abalone", } path = CloudBuildClient.service_account_path(**expected) @@ -18001,8 +18968,8 @@ def test_parse_service_account_path(): def test_subscription_path(): - project = "winkle" - subscription = "nautilus" + project = "squid" + subscription = "clam" expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, @@ -18013,8 +18980,8 @@ def test_subscription_path(): def test_parse_subscription_path(): expected = { - "project": "scallop", - "subscription": "abalone", + 
"project": "whelk", + "subscription": "octopus", } path = CloudBuildClient.subscription_path(**expected) @@ -18024,8 +18991,8 @@ def test_parse_subscription_path(): def test_topic_path(): - project = "squid" - topic = "clam" + project = "oyster" + topic = "nudibranch" expected = "projects/{project}/topics/{topic}".format( project=project, topic=topic, @@ -18036,8 +19003,8 @@ def test_topic_path(): def test_parse_topic_path(): expected = { - "project": "whelk", - "topic": "octopus", + "project": "cuttlefish", + "topic": "mussel", } path = CloudBuildClient.topic_path(**expected) @@ -18047,9 +19014,9 @@ def test_parse_topic_path(): def test_worker_pool_path(): - project = "oyster" - location = "nudibranch" - worker_pool = "cuttlefish" + project = "winkle" + location = "nautilus" + worker_pool = "scallop" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -18063,9 +19030,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "mussel", - "location": "winkle", - "worker_pool": "nautilus", + "project": "abalone", + "location": "squid", + "worker_pool": "clam", } path = CloudBuildClient.worker_pool_path(**expected) @@ -18075,7 +19042,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -18085,7 +19052,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "octopus", } path = CloudBuildClient.common_billing_account_path(**expected) @@ -18095,7 +19062,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "squid" + folder = "oyster" expected = "folders/{folder}".format( folder=folder, ) @@ -18105,7 +19072,7 @@ def test_common_folder_path(): def 
test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "nudibranch", } path = CloudBuildClient.common_folder_path(**expected) @@ -18115,7 +19082,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "whelk" + organization = "cuttlefish" expected = "organizations/{organization}".format( organization=organization, ) @@ -18125,7 +19092,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "mussel", } path = CloudBuildClient.common_organization_path(**expected) @@ -18135,7 +19102,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "oyster" + project = "winkle" expected = "projects/{project}".format( project=project, ) @@ -18145,7 +19112,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "nautilus", } path = CloudBuildClient.common_project_path(**expected) @@ -18155,8 +19122,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -18167,8 +19134,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "squid", + "location": "clam", } path = CloudBuildClient.common_location_path(**expected) diff --git a/packages/google-cloud-channel/google/cloud/channel/__init__.py b/packages/google-cloud-channel/google/cloud/channel/__init__.py index bf847445cec7..98e90ea1af76 100644 --- a/packages/google-cloud-channel/google/cloud/channel/__init__.py +++ b/packages/google-cloud-channel/google/cloud/channel/__init__.py @@ -58,6 +58,8 @@ from google.cloud.channel_v1.types.offers import ( Constraints, 
CustomerConstraints, + DiscountComponent, + DiscountType, Offer, ParameterDefinition, PaymentPlan, @@ -220,6 +222,7 @@ "TrialSettings", "Constraints", "CustomerConstraints", + "DiscountComponent", "Offer", "ParameterDefinition", "Period", @@ -228,6 +231,7 @@ "PriceByResource", "PricePhase", "PriceTier", + "DiscountType", "PaymentPlan", "PaymentType", "PeriodType", diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py b/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py index 3307089c9853..7fb952f3277a 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/__init__.py @@ -49,6 +49,8 @@ from .types.offers import ( Constraints, CustomerConstraints, + DiscountComponent, + DiscountType, Offer, ParameterDefinition, PaymentPlan, @@ -215,6 +217,8 @@ "DeleteChannelPartnerRepricingConfigRequest", "DeleteCustomerRepricingConfigRequest", "DeleteCustomerRequest", + "DiscountComponent", + "DiscountType", "EduData", "Entitlement", "EntitlementChange", diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py index 03e467605b63..4bb20a057ecf 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/async_client.py @@ -114,6 +114,8 @@ class CloudChannelServiceAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = CloudChannelServiceClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = CloudChannelServiceClient._DEFAULT_UNIVERSE + account_path = staticmethod(CloudChannelServiceClient.account_path) + parse_account_path = staticmethod(CloudChannelServiceClient.parse_account_path) billing_account_path = staticmethod(CloudChannelServiceClient.billing_account_path) 
parse_billing_account_path = staticmethod( CloudChannelServiceClient.parse_billing_account_path @@ -1958,7 +1960,7 @@ async def sample_change_parameters(): Args: request (Optional[Union[google.cloud.channel_v1.types.ChangeParametersRequest, dict]]): The request object. Request message for - [CloudChannelService.ChangeParametersRequest][]. + [CloudChannelService.ChangeParameters][google.cloud.channel.v1.CloudChannelService.ChangeParameters]. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -6054,8 +6056,8 @@ async def register_subscriber( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> service.RegisterSubscriberResponse: r"""Registers a service account with subscriber privileges on the - Cloud Pub/Sub topic for this Channel Services account. After you - create a subscriber, you get the events through + Pub/Sub topic for this Channel Services account or integrator. + After you create a subscriber, you get the events through [SubscriberEvent][google.cloud.channel.v1.SubscriberEvent] Possible error codes: @@ -6090,7 +6092,6 @@ async def sample_register_subscriber(): # Initialize request argument(s) request = channel_v1.RegisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) @@ -6159,10 +6160,10 @@ async def unregister_subscriber( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> service.UnregisterSubscriberResponse: r"""Unregisters a service account with subscriber privileges on the - Cloud Pub/Sub topic created for this Channel Services account. - If there are no service accounts left with subscriber - privileges, this deletes the topic. You can call ListSubscribers - to check for these accounts. + Pub/Sub topic created for this Channel Services account or + integrator. If there are no service accounts left with + subscriber privileges, this deletes the topic. 
You can call + ListSubscribers to check for these accounts. Possible error codes: @@ -6198,7 +6199,6 @@ async def sample_unregister_subscriber(): # Initialize request argument(s) request = channel_v1.UnregisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) @@ -6266,8 +6266,8 @@ async def list_subscribers( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSubscribersAsyncPager: - r"""Lists service accounts with subscriber privileges on the Cloud - Pub/Sub topic created for this Channel Services account. + r"""Lists service accounts with subscriber privileges on the Pub/Sub + topic created for this Channel Services account or integrator. Possible error codes: @@ -6301,7 +6301,6 @@ async def sample_list_subscribers(): # Initialize request argument(s) request = channel_v1.ListSubscribersRequest( - account="account_value", ) # Make the request diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py index c8672311a765..19de28e38c7e 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/client.py @@ -235,6 +235,21 @@ def transport(self) -> CloudChannelServiceTransport: """ return self._transport + @staticmethod + def account_path( + account: str, + ) -> str: + """Returns a fully-qualified account string.""" + return "accounts/{account}".format( + account=account, + ) + + @staticmethod + def parse_account_path(path: str) -> Dict[str, str]: + """Parses a account path into its component segments.""" + m = re.match(r"^accounts/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def billing_account_path( account: str, @@ -2489,7 +2504,7 @@ def 
sample_change_parameters(): Args: request (Union[google.cloud.channel_v1.types.ChangeParametersRequest, dict]): The request object. Request message for - [CloudChannelService.ChangeParametersRequest][]. + [CloudChannelService.ChangeParameters][google.cloud.channel.v1.CloudChannelService.ChangeParameters]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -6545,8 +6560,8 @@ def register_subscriber( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> service.RegisterSubscriberResponse: r"""Registers a service account with subscriber privileges on the - Cloud Pub/Sub topic for this Channel Services account. After you - create a subscriber, you get the events through + Pub/Sub topic for this Channel Services account or integrator. + After you create a subscriber, you get the events through [SubscriberEvent][google.cloud.channel.v1.SubscriberEvent] Possible error codes: @@ -6581,7 +6596,6 @@ def sample_register_subscriber(): # Initialize request argument(s) request = channel_v1.RegisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) @@ -6648,10 +6662,10 @@ def unregister_subscriber( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> service.UnregisterSubscriberResponse: r"""Unregisters a service account with subscriber privileges on the - Cloud Pub/Sub topic created for this Channel Services account. - If there are no service accounts left with subscriber - privileges, this deletes the topic. You can call ListSubscribers - to check for these accounts. + Pub/Sub topic created for this Channel Services account or + integrator. If there are no service accounts left with + subscriber privileges, this deletes the topic. You can call + ListSubscribers to check for these accounts. 
Possible error codes: @@ -6687,7 +6701,6 @@ def sample_unregister_subscriber(): # Initialize request argument(s) request = channel_v1.UnregisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) @@ -6753,8 +6766,8 @@ def list_subscribers( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSubscribersPager: - r"""Lists service accounts with subscriber privileges on the Cloud - Pub/Sub topic created for this Channel Services account. + r"""Lists service accounts with subscriber privileges on the Pub/Sub + topic created for this Channel Services account or integrator. Possible error codes: @@ -6788,7 +6801,6 @@ def sample_list_subscribers(): # Initialize request argument(s) request = channel_v1.ListSubscribersRequest( - account="account_value", ) # Make the request diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py index 5097fa01e0c7..0aa0307b1311 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc.py @@ -2595,8 +2595,8 @@ def register_subscriber( r"""Return a callable for the register subscriber method over gRPC. Registers a service account with subscriber privileges on the - Cloud Pub/Sub topic for this Channel Services account. After you - create a subscriber, you get the events through + Pub/Sub topic for this Channel Services account or integrator. + After you create a subscriber, you get the events through [SubscriberEvent][google.cloud.channel.v1.SubscriberEvent] Possible error codes: @@ -2641,10 +2641,10 @@ def unregister_subscriber( r"""Return a callable for the unregister subscriber method over gRPC. 
Unregisters a service account with subscriber privileges on the - Cloud Pub/Sub topic created for this Channel Services account. - If there are no service accounts left with subscriber - privileges, this deletes the topic. You can call ListSubscribers - to check for these accounts. + Pub/Sub topic created for this Channel Services account or + integrator. If there are no service accounts left with + subscriber privileges, this deletes the topic. You can call + ListSubscribers to check for these accounts. Possible error codes: @@ -2687,8 +2687,8 @@ def list_subscribers( ) -> Callable[[service.ListSubscribersRequest], service.ListSubscribersResponse]: r"""Return a callable for the list subscribers method over gRPC. - Lists service accounts with subscriber privileges on the Cloud - Pub/Sub topic created for this Channel Services account. + Lists service accounts with subscriber privileges on the Pub/Sub + topic created for this Channel Services account or integrator. Possible error codes: diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py index 453bdde0704e..942863535f24 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/services/cloud_channel_service/transports/grpc_asyncio.py @@ -2641,8 +2641,8 @@ def register_subscriber( r"""Return a callable for the register subscriber method over gRPC. Registers a service account with subscriber privileges on the - Cloud Pub/Sub topic for this Channel Services account. After you - create a subscriber, you get the events through + Pub/Sub topic for this Channel Services account or integrator. 
+ After you create a subscriber, you get the events through [SubscriberEvent][google.cloud.channel.v1.SubscriberEvent] Possible error codes: @@ -2688,10 +2688,10 @@ def unregister_subscriber( r"""Return a callable for the unregister subscriber method over gRPC. Unregisters a service account with subscriber privileges on the - Cloud Pub/Sub topic created for this Channel Services account. - If there are no service accounts left with subscriber - privileges, this deletes the topic. You can call ListSubscribers - to check for these accounts. + Pub/Sub topic created for this Channel Services account or + integrator. If there are no service accounts left with + subscriber privileges, this deletes the topic. You can call + ListSubscribers to check for these accounts. Possible error codes: @@ -2736,8 +2736,8 @@ def list_subscribers( ]: r"""Return a callable for the list subscribers method over gRPC. - Lists service accounts with subscriber privileges on the Cloud - Pub/Sub topic created for this Channel Services account. + Lists service accounts with subscriber privileges on the Pub/Sub + topic created for this Channel Services account or integrator. 
Possible error codes: diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py index 018a04668ebb..51d7b4f5feca 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/__init__.py @@ -36,6 +36,8 @@ from .offers import ( Constraints, CustomerConstraints, + DiscountComponent, + DiscountType, Offer, ParameterDefinition, PaymentPlan, @@ -184,6 +186,7 @@ "TrialSettings", "Constraints", "CustomerConstraints", + "DiscountComponent", "Offer", "ParameterDefinition", "Period", @@ -192,6 +195,7 @@ "PriceByResource", "PricePhase", "PriceTier", + "DiscountType", "PaymentPlan", "PaymentType", "PeriodType", diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py index f61875dd3ad5..969a16ff4a0e 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/customers.py @@ -89,15 +89,13 @@ class Customer(proto.Message): Populated only if a CRM ID exists for this customer. customer_attestation_state (google.cloud.channel_v1.types.Customer.CustomerAttestationState): - Optional. Indicate whether a customer is - attesting about the correctness of provided - information. Only required if creating a GCP - Entitlement. + Optional. Indicate if a customer is attesting + about the correctness of provided information. + Only required if creating a GCP Entitlement. 
""" class CustomerAttestationState(proto.Enum): - r"""The enum represents whether a customer belongs to public - sector + r"""The enum represents if a customer belongs to public sector Values: CUSTOMER_ATTESTATION_STATE_UNSPECIFIED (0): diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py index 7d1d0a5d5f75..91251d925285 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/entitlements.py @@ -111,6 +111,12 @@ class Entitlement(proto.Message): billing_account (str): Optional. The billing account resource name that is used to pay for this entitlement. + price_reference_id (str): + Optional. Price reference ID for the offer. + Only for offers that require additional price + information. Used to guarantee that the pricing + is consistent between quoting the offer and + placing the order. """ class ProvisioningState(proto.Enum): @@ -218,6 +224,10 @@ class SuspensionReason(proto.Enum): proto.STRING, number=28, ) + price_reference_id: str = proto.Field( + proto.STRING, + number=29, + ) class Parameter(proto.Message): diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py index 01b37a135668..53945957d8f9 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/offers.py @@ -31,6 +31,7 @@ "PaymentType", "ResourceType", "PeriodType", + "DiscountType", "Offer", "ParameterDefinition", "Constraints", @@ -41,6 +42,7 @@ "PricePhase", "PriceTier", "Period", + "DiscountComponent", }, ) @@ -170,6 +172,31 @@ class PeriodType(proto.Enum): YEAR = 3 +class DiscountType(proto.Enum): + r"""Discount Type. + + Values: + DISCOUNT_TYPE_UNSPECIFIED (0): + Not used. 
+ REGIONAL_DISCOUNT (1): + Regional discount. + PROMOTIONAL_DISCOUNT (2): + Promotional discount. + SALES_DISCOUNT (3): + Sales-provided discount. + RESELLER_MARGIN (4): + Reseller margin. + DEAL_CODE (5): + Deal code discount. + """ + DISCOUNT_TYPE_UNSPECIFIED = 0 + REGIONAL_DISCOUNT = 1 + PROMOTIONAL_DISCOUNT = 2 + SALES_DISCOUNT = 3 + RESELLER_MARGIN = 4 + DEAL_CODE = 5 + + class Offer(proto.Message): r"""Represents an offer made to resellers for purchase. An offer is associated with a [Sku][google.cloud.channel.v1.Sku], has a plan for @@ -478,9 +505,17 @@ class Price(proto.Message): 0.2. effective_price (google.type.money_pb2.Money): Effective Price after applying the discounts. + price_period (google.cloud.channel_v1.types.Period): + The time period with respect to which base + and effective prices are defined. + Example: 1 month, 6 months, 1 year, etc. external_price_uri (str): Link to external price list, such as link to Google Voice rate card. + discount_components (MutableSequence[google.cloud.channel_v1.types.DiscountComponent]): + Breakdown of the discount into its + components. This will be empty if there is no + discount present. """ base_price: money_pb2.Money = proto.Field( @@ -497,10 +532,20 @@ class Price(proto.Message): number=3, message=money_pb2.Money, ) + price_period: "Period" = proto.Field( + proto.MESSAGE, + number=6, + message="Period", + ) external_price_uri: str = proto.Field( proto.STRING, number=4, ) + discount_components: MutableSequence["DiscountComponent"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="DiscountComponent", + ) class PricePhase(proto.Message): @@ -604,4 +649,48 @@ class Period(proto.Message): ) +class DiscountComponent(proto.Message): + r"""Represents a single component of the total discount + applicable on a Price. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + discount_percentage (float): + Discount percentage, represented as decimal. + For example, a 20% discount will be represented + as 0.2. + + This field is a member of `oneof`_ ``discount_value``. + discount_absolute (google.type.money_pb2.Money): + Fixed value discount. + + This field is a member of `oneof`_ ``discount_value``. + discount_type (google.cloud.channel_v1.types.DiscountType): + Type of the discount. + """ + + discount_percentage: float = proto.Field( + proto.DOUBLE, + number=3, + oneof="discount_value", + ) + discount_absolute: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=4, + oneof="discount_value", + message=money_pb2.Money, + ) + discount_type: "DiscountType" = proto.Field( + proto.ENUM, + number=2, + enum="DiscountType", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py index b12d7ce33df2..4dea3dff405d 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/repricing.py @@ -144,8 +144,8 @@ class RepricingConfig(proto.Message): Attributes: entitlement_granularity (google.cloud.channel_v1.types.RepricingConfig.EntitlementGranularity): - Applies the repricing configuration at the entitlement - level. + Required. Applies the repricing configuration at the + entitlement level. 
Note: If a [ChannelPartnerRepricingConfig][google.cloud.channel.v1.ChannelPartnerRepricingConfig] diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py index 7bd9c472e3a9..1aab0f0820b0 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/service.py @@ -160,7 +160,8 @@ class CloudIdentityCustomerAccount(proto.Message): owned (bool): Returns true if the Cloud Identity account is associated with a customer of the Channel - Services partner. + Services partner (with active subscriptions or + purchase consents). customer_name (str): If owned = true, the name of the customer that owns the Cloud Identity account. Customer_name uses the format: @@ -812,6 +813,12 @@ class TransferableOffer(proto.Message): offer (google.cloud.channel_v1.types.Offer): Offer with parameter constraints updated to allow the Transfer. + price_reference_id (str): + Optional. Price reference ID for the offer. + Only for offers that require additional price + information. Used to guarantee that the pricing + is consistent between quoting the offer and + placing the order. """ offer: gcc_offers.Offer = proto.Field( @@ -819,6 +826,10 @@ class TransferableOffer(proto.Message): number=1, message=gcc_offers.Offer, ) + price_reference_id: str = proto.Field( + proto.STRING, + number=2, + ) class GetEntitlementRequest(proto.Message): @@ -1343,7 +1354,8 @@ class ListSkuGroupsRequest(proto.Message): page_token (str): Optional. A token identifying a page of results beyond the first page. Obtained through - [ListSkuGroups.next_page_token][] of the previous + [ListSkuGroupsResponse.next_page_token][google.cloud.channel.v1.ListSkuGroupsResponse.next_page_token] + of the previous [CloudChannelService.ListSkuGroups][google.cloud.channel.v1.CloudChannelService.ListSkuGroups] call. 
""" @@ -1378,7 +1390,8 @@ class ListSkuGroupBillableSkusRequest(proto.Message): page_token (str): Optional. A token identifying a page of results beyond the first page. Obtained through - [ListSkuGroupBillableSkus.next_page_token][] of the previous + [ListSkuGroupBillableSkusResponse.next_page_token][google.cloud.channel.v1.ListSkuGroupBillableSkusResponse.next_page_token] + of the previous [CloudChannelService.ListSkuGroupBillableSkus][google.cloud.channel.v1.CloudChannelService.ListSkuGroupBillableSkus] call. """ @@ -1405,7 +1418,8 @@ class ListSkuGroupsResponse(proto.Message): The list of SKU groups requested. next_page_token (str): A token to retrieve the next page of results. Pass to - [ListSkuGroups.page_token][] to obtain that page. + [ListSkuGroupsRequest.page_token][google.cloud.channel.v1.ListSkuGroupsRequest.page_token] + to obtain that page. """ @property @@ -1432,7 +1446,8 @@ class ListSkuGroupBillableSkusResponse(proto.Message): SKU group. next_page_token (str): A token to retrieve the next page of results. Pass to - [ListSkuGroupBillableSkus.page_token][] to obtain that page. + [ListSkuGroupBillableSkusRequest.page_token][google.cloud.channel.v1.ListSkuGroupBillableSkusRequest.page_token] + to obtain that page. """ @property @@ -1676,7 +1691,8 @@ class TransferEntitlementsToGoogleRequest(proto.Message): class ChangeParametersRequest(proto.Message): - r"""Request message for [CloudChannelService.ChangeParametersRequest][]. + r"""Request message for + [CloudChannelService.ChangeParameters][google.cloud.channel.v1.CloudChannelService.ChangeParameters]. Attributes: name (str): @@ -1815,6 +1831,12 @@ class ChangeOfferRequest(proto.Message): This field is only relevant for multi-currency accounts. It should be left empty for single currency accounts. + price_reference_id (str): + Optional. Price reference ID for the offer. + Only for offers that require additional price + information. 
Used to guarantee that the pricing + is consistent between quoting the offer and + placing the order. """ name: str = proto.Field( @@ -1842,6 +1864,10 @@ class ChangeOfferRequest(proto.Message): proto.STRING, number=7, ) + price_reference_id: str = proto.Field( + proto.STRING, + number=8, + ) class StartPaidServiceRequest(proto.Message): @@ -2212,6 +2238,10 @@ class ListOffersResponse(proto.Message): Attributes: offers (MutableSequence[google.cloud.channel_v1.types.Offer]): The list of Offers requested. + + The pricing information for each Offer only + includes the base price. Effective prices and + discounts aren't populated. next_page_token (str): A token to retrieve the next page of results. """ @@ -2554,6 +2584,12 @@ class PurchasableOffer(proto.Message): Attributes: offer (google.cloud.channel_v1.types.Offer): Offer. + price_reference_id (str): + Optional. Price reference ID for the offer. + Only for offers that require additional price + information. Used to guarantee that the pricing + is consistent between quoting the offer and + placing the order. """ offer: gcc_offers.Offer = proto.Field( @@ -2561,6 +2597,10 @@ class PurchasableOffer(proto.Message): number=1, message=gcc_offers.Offer, ) + price_reference_id: str = proto.Field( + proto.STRING, + number=2, + ) class QueryEligibleBillingAccountsRequest(proto.Message): @@ -2651,12 +2691,22 @@ class BillingAccountPurchaseInfo(proto.Message): class RegisterSubscriberRequest(proto.Message): r"""Request Message for RegisterSubscriber. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: account (str): - Required. Resource name of the account. + Optional. Resource name of the account. + Required if integrator is not provided. + Otherwise, leave this field empty/unset. service_account (str): Required. Service account that provides subscriber access to the registered topic. + integrator (str): + Optional. Resource name of the integrator. 
+ Required if account is not provided. Otherwise, + leave this field empty/unset. + + This field is a member of `oneof`_ ``_integrator``. """ account: str = proto.Field( @@ -2667,6 +2717,11 @@ class RegisterSubscriberRequest(proto.Message): proto.STRING, number=2, ) + integrator: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) class RegisterSubscriberResponse(proto.Message): @@ -2687,12 +2742,22 @@ class RegisterSubscriberResponse(proto.Message): class UnregisterSubscriberRequest(proto.Message): r"""Request Message for UnregisterSubscriber. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: account (str): - Required. Resource name of the account. + Optional. Resource name of the account. + Required if integrator is not provided. + Otherwise, leave this field empty/unset. service_account (str): Required. Service account to unregister from subscriber access to the topic. + integrator (str): + Optional. Resource name of the integrator. + Required if account is not provided. Otherwise, + leave this field empty/unset. + + This field is a member of `oneof`_ ``_integrator``. """ account: str = proto.Field( @@ -2703,6 +2768,11 @@ class UnregisterSubscriberRequest(proto.Message): proto.STRING, number=2, ) + integrator: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) class UnregisterSubscriberResponse(proto.Message): @@ -2723,9 +2793,13 @@ class UnregisterSubscriberResponse(proto.Message): class ListSubscribersRequest(proto.Message): r"""Request Message for ListSubscribers. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: account (str): - Required. Resource name of the account. + Optional. Resource name of the account. + Required if integrator is not provided. + Otherwise, leave this field empty/unset. page_size (int): Optional. The maximum number of service accounts to return. 
The service may return fewer @@ -2740,6 +2814,12 @@ class ListSubscribersRequest(proto.Message): When paginating, all other parameters provided to ``ListSubscribers`` must match the call that provided the page token. + integrator (str): + Optional. Resource name of the integrator. + Required if account is not provided. Otherwise, + leave this field empty/unset. + + This field is a member of `oneof`_ ``_integrator``. """ account: str = proto.Field( @@ -2754,6 +2834,11 @@ class ListSubscribersRequest(proto.Message): proto.STRING, number=3, ) + integrator: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) class ListSubscribersResponse(proto.Message): diff --git a/packages/google-cloud-channel/google/cloud/channel_v1/types/subscriber_event.py b/packages/google-cloud-channel/google/cloud/channel_v1/types/subscriber_event.py index b5a6deb808cc..4c08d87fd64f 100644 --- a/packages/google-cloud-channel/google/cloud/channel_v1/types/subscriber_event.py +++ b/packages/google-cloud-channel/google/cloud/channel_v1/types/subscriber_event.py @@ -38,7 +38,8 @@ class CustomerEvent(proto.Message): Resource name of the customer. Format: accounts/{account_id}/customers/{customer_id} event_type (google.cloud.channel_v1.types.CustomerEvent.Type): - Type of event which happened on the customer. + Type of event which happened for the + customer. """ class Type(proto.Enum): @@ -77,7 +78,7 @@ class EntitlementEvent(proto.Message): Resource name of an entitlement of the form: accounts/{account_id}/customers/{customer_id}/entitlements/{entitlement_id} event_type (google.cloud.channel_v1.types.EntitlementEvent.Type): - Type of event which happened on the + Type of event which happened for the entitlement. 
""" diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_async.py b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_async.py index 8ca6f0b5b932..ab06ce19b8aa 100644 --- a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_async.py +++ b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_async.py @@ -39,9 +39,7 @@ async def sample_list_subscribers(): client = channel_v1.CloudChannelServiceAsyncClient() # Initialize request argument(s) - request = channel_v1.ListSubscribersRequest( - account="account_value", - ) + request = channel_v1.ListSubscribersRequest() # Make the request page_result = client.list_subscribers(request=request) diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_sync.py b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_sync.py index 754abc126ba5..41dc47d14095 100644 --- a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_sync.py +++ b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_list_subscribers_sync.py @@ -39,9 +39,7 @@ def sample_list_subscribers(): client = channel_v1.CloudChannelServiceClient() # Initialize request argument(s) - request = channel_v1.ListSubscribersRequest( - account="account_value", - ) + request = channel_v1.ListSubscribersRequest() # Make the request page_result = client.list_subscribers(request=request) diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_async.py 
b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_async.py index b0c5da78b15a..cf16b4056c83 100644 --- a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_async.py +++ b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_async.py @@ -40,7 +40,6 @@ async def sample_register_subscriber(): # Initialize request argument(s) request = channel_v1.RegisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_sync.py b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_sync.py index 907b7532b283..f1d6be16c6e9 100644 --- a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_sync.py +++ b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_register_subscriber_sync.py @@ -40,7 +40,6 @@ def sample_register_subscriber(): # Initialize request argument(s) request = channel_v1.RegisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_async.py b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_async.py index 69a15dc15337..7a1ff149b32f 100644 --- a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_async.py +++ 
b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_async.py @@ -40,7 +40,6 @@ async def sample_unregister_subscriber(): # Initialize request argument(s) request = channel_v1.UnregisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) diff --git a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_sync.py b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_sync.py index a4d58096e7cf..3b8ff75e4753 100644 --- a/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_sync.py +++ b/packages/google-cloud-channel/samples/generated_samples/cloudchannel_v1_generated_cloud_channel_service_unregister_subscriber_sync.py @@ -40,7 +40,6 @@ def sample_unregister_subscriber(): # Initialize request argument(s) request = channel_v1.UnregisterSubscriberRequest( - account="account_value", service_account="service_account_value", ) diff --git a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json index 763f13d200ff..b684d2e0ba4b 100644 --- a/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json +++ b/packages/google-cloud-channel/samples/generated_samples/snippet_metadata_google.cloud.channel.v1.json @@ -5682,12 +5682,12 @@ "regionTag": "cloudchannel_v1_generated_CloudChannelService_ListSubscribers_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -5697,18 +5697,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -5758,12 +5758,12 @@ "regionTag": "cloudchannel_v1_generated_CloudChannelService_ListSubscribers_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -5773,18 +5773,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -6600,12 +6600,12 @@ "regionTag": "cloudchannel_v1_generated_CloudChannelService_RegisterSubscriber_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6615,18 +6615,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -6676,12 +6676,12 @@ "regionTag": "cloudchannel_v1_generated_CloudChannelService_RegisterSubscriber_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -6691,18 +6691,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -7365,12 +7365,12 @@ "regionTag": "cloudchannel_v1_generated_CloudChannelService_UnregisterSubscriber_async", 
"segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7380,18 +7380,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -7441,12 +7441,12 @@ "regionTag": "cloudchannel_v1_generated_CloudChannelService_UnregisterSubscriber_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -7456,18 +7456,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py index cf78e3f05b95..569a07343c19 100644 --- a/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py +++ b/packages/google-cloud-channel/scripts/fixup_channel_v1_keywords.py @@ -41,7 +41,7 @@ class channelCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'activate_entitlement': ('name', 'request_id', ), 'cancel_entitlement': ('name', 'request_id', ), - 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', 'billing_account', ), + 'change_offer': ('name', 'offer', 'parameters', 'purchase_order_id', 'request_id', 'billing_account', 'price_reference_id', ), 'change_parameters': ('name', 'parameters', 'request_id', 'purchase_order_id', ), 'change_renewal_settings': ('name', 'renewal_settings', 'request_id', ), 'check_cloud_identity_accounts_exist': ('parent', 
'domain', 'primary_admin_email', ), @@ -74,19 +74,19 @@ class channelCallTransformer(cst.CSTTransformer): 'list_sku_group_billable_skus': ('parent', 'page_size', 'page_token', ), 'list_sku_groups': ('parent', 'page_size', 'page_token', ), 'list_skus': ('parent', 'account', 'page_size', 'page_token', 'language_code', ), - 'list_subscribers': ('account', 'page_size', 'page_token', ), + 'list_subscribers': ('account', 'page_size', 'page_token', 'integrator', ), 'list_transferable_offers': ('parent', 'sku', 'cloud_identity_id', 'customer_name', 'page_size', 'page_token', 'language_code', 'billing_account', ), 'list_transferable_skus': ('parent', 'cloud_identity_id', 'customer_name', 'page_size', 'page_token', 'auth_token', 'language_code', ), 'lookup_offer': ('entitlement', ), 'provision_cloud_identity': ('customer', 'cloud_identity_info', 'user', 'validate_only', ), 'query_eligible_billing_accounts': ('customer', 'skus', ), - 'register_subscriber': ('account', 'service_account', ), + 'register_subscriber': ('service_account', 'account', 'integrator', ), 'run_report_job': ('name', 'date_range', 'filter', 'language_code', ), 'start_paid_service': ('name', 'request_id', ), 'suspend_entitlement': ('name', 'request_id', ), 'transfer_entitlements': ('parent', 'entitlements', 'auth_token', 'request_id', ), 'transfer_entitlements_to_google': ('parent', 'entitlements', 'request_id', ), - 'unregister_subscriber': ('account', 'service_account', ), + 'unregister_subscriber': ('service_account', 'account', 'integrator', ), 'update_channel_partner_link': ('name', 'channel_partner_link', 'update_mask', ), 'update_channel_partner_repricing_config': ('channel_partner_repricing_config', ), 'update_customer': ('customer', 'update_mask', ), diff --git a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py index 53317d3de8c3..160685ef4667 100644 --- 
a/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py +++ b/packages/google-cloud-channel/tests/unit/gapic/channel_v1/test_cloud_channel_service.py @@ -5041,6 +5041,7 @@ def test_get_entitlement(request_type, transport: str = "grpc"): ], purchase_order_id="purchase_order_id_value", billing_account="billing_account_value", + price_reference_id="price_reference_id_value", ) response = client.get_entitlement(request) @@ -5062,6 +5063,7 @@ def test_get_entitlement(request_type, transport: str = "grpc"): ] assert response.purchase_order_id == "purchase_order_id_value" assert response.billing_account == "billing_account_value" + assert response.price_reference_id == "price_reference_id_value" def test_get_entitlement_non_empty_request_with_auto_populated_field(): @@ -5195,6 +5197,7 @@ async def test_get_entitlement_async( ], purchase_order_id="purchase_order_id_value", billing_account="billing_account_value", + price_reference_id="price_reference_id_value", ) ) response = await client.get_entitlement(request) @@ -5217,6 +5220,7 @@ async def test_get_entitlement_async( ] assert response.purchase_order_id == "purchase_order_id_value" assert response.billing_account == "billing_account_value" + assert response.price_reference_id == "price_reference_id_value" @pytest.mark.asyncio @@ -6119,6 +6123,7 @@ def test_change_offer_non_empty_request_with_auto_populated_field(): purchase_order_id="purchase_order_id_value", request_id="request_id_value", billing_account="billing_account_value", + price_reference_id="price_reference_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6135,6 +6140,7 @@ def test_change_offer_non_empty_request_with_auto_populated_field(): purchase_order_id="purchase_order_id_value", request_id="request_id_value", billing_account="billing_account_value", + price_reference_id="price_reference_id_value", ) @@ -16840,6 +16846,7 @@ def test_register_subscriber_non_empty_request_with_auto_populated_field(): request = service.RegisterSubscriberRequest( account="account_value", service_account="service_account_value", + integrator="integrator_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -16855,6 +16862,7 @@ def test_register_subscriber_non_empty_request_with_auto_populated_field(): assert args[0] == service.RegisterSubscriberRequest( account="account_value", service_account="service_account_value", + integrator="integrator_value", ) @@ -17097,6 +17105,7 @@ def test_unregister_subscriber_non_empty_request_with_auto_populated_field(): request = service.UnregisterSubscriberRequest( account="account_value", service_account="service_account_value", + integrator="integrator_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -17112,6 +17121,7 @@ def test_unregister_subscriber_non_empty_request_with_auto_populated_field(): assert args[0] == service.UnregisterSubscriberRequest( account="account_value", service_account="service_account_value", + integrator="integrator_value", ) @@ -17357,6 +17367,7 @@ def test_list_subscribers_non_empty_request_with_auto_populated_field(): request = service.ListSubscribersRequest( account="account_value", page_token="page_token_value", + integrator="integrator_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -17370,6 +17381,7 @@ def test_list_subscribers_non_empty_request_with_auto_populated_field(): assert args[0] == service.ListSubscribersRequest( account="account_value", page_token="page_token_value", + integrator="integrator_value", ) @@ -19875,6 +19887,7 @@ async def test_get_entitlement_empty_call_grpc_asyncio(): ], purchase_order_id="purchase_order_id_value", billing_account="billing_account_value", + price_reference_id="price_reference_id_value", ) ) await client.get_entitlement(request=None) @@ -21403,9 +21416,29 @@ def test_cloud_channel_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client -def test_billing_account_path(): +def test_account_path(): account = "squid" - billing_account = "clam" + expected = "accounts/{account}".format( + account=account, + ) + actual = CloudChannelServiceClient.account_path(account) + assert expected == actual + + +def test_parse_account_path(): + expected = { + "account": "clam", + } + path = CloudChannelServiceClient.account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudChannelServiceClient.parse_account_path(path) + assert expected == actual + + +def test_billing_account_path(): + account = "whelk" + billing_account = "octopus" expected = "accounts/{account}/billingAccounts/{billing_account}".format( account=account, billing_account=billing_account, @@ -21416,8 +21449,8 @@ def test_billing_account_path(): def test_parse_billing_account_path(): expected = { - "account": "whelk", - "billing_account": "octopus", + "account": "oyster", + "billing_account": "nudibranch", } path = CloudChannelServiceClient.billing_account_path(**expected) @@ -21427,8 +21460,8 @@ def test_parse_billing_account_path(): def test_channel_partner_link_path(): - account = "oyster" - channel_partner_link = "nudibranch" + account = "cuttlefish" + channel_partner_link = "mussel" expected = "accounts/{account}/channelPartnerLinks/{channel_partner_link}".format( account=account, channel_partner_link=channel_partner_link, @@ -21441,8 +21474,8 @@ def test_channel_partner_link_path(): def test_parse_channel_partner_link_path(): expected = { - "account": "cuttlefish", - "channel_partner_link": "mussel", + "account": "winkle", + "channel_partner_link": "nautilus", } path = CloudChannelServiceClient.channel_partner_link_path(**expected) @@ -21452,9 +21485,9 @@ def test_parse_channel_partner_link_path(): def test_channel_partner_repricing_config_path(): - account = "winkle" - channel_partner = "nautilus" - channel_partner_repricing_config = "scallop" + account = "scallop" + channel_partner = "abalone" + channel_partner_repricing_config = "squid" expected = "accounts/{account}/channelPartnerLinks/{channel_partner}/channelPartnerRepricingConfigs/{channel_partner_repricing_config}".format( account=account, channel_partner=channel_partner, @@ -21468,9 +21501,9 @@ def test_channel_partner_repricing_config_path(): def test_parse_channel_partner_repricing_config_path(): expected = { - "account": "abalone", - "channel_partner": "squid", - 
"channel_partner_repricing_config": "clam", + "account": "clam", + "channel_partner": "whelk", + "channel_partner_repricing_config": "octopus", } path = CloudChannelServiceClient.channel_partner_repricing_config_path(**expected) @@ -21480,8 +21513,8 @@ def test_parse_channel_partner_repricing_config_path(): def test_customer_path(): - account = "whelk" - customer = "octopus" + account = "oyster" + customer = "nudibranch" expected = "accounts/{account}/customers/{customer}".format( account=account, customer=customer, @@ -21492,8 +21525,8 @@ def test_customer_path(): def test_parse_customer_path(): expected = { - "account": "oyster", - "customer": "nudibranch", + "account": "cuttlefish", + "customer": "mussel", } path = CloudChannelServiceClient.customer_path(**expected) @@ -21503,9 +21536,9 @@ def test_parse_customer_path(): def test_customer_repricing_config_path(): - account = "cuttlefish" - customer = "mussel" - customer_repricing_config = "winkle" + account = "winkle" + customer = "nautilus" + customer_repricing_config = "scallop" expected = "accounts/{account}/customers/{customer}/customerRepricingConfigs/{customer_repricing_config}".format( account=account, customer=customer, @@ -21519,9 +21552,9 @@ def test_customer_repricing_config_path(): def test_parse_customer_repricing_config_path(): expected = { - "account": "nautilus", - "customer": "scallop", - "customer_repricing_config": "abalone", + "account": "abalone", + "customer": "squid", + "customer_repricing_config": "clam", } path = CloudChannelServiceClient.customer_repricing_config_path(**expected) @@ -21531,9 +21564,9 @@ def test_parse_customer_repricing_config_path(): def test_entitlement_path(): - account = "squid" - customer = "clam" - entitlement = "whelk" + account = "whelk" + customer = "octopus" + entitlement = "oyster" expected = ( "accounts/{account}/customers/{customer}/entitlements/{entitlement}".format( account=account, @@ -21547,9 +21580,9 @@ def test_entitlement_path(): def 
test_parse_entitlement_path(): expected = { - "account": "octopus", - "customer": "oyster", - "entitlement": "nudibranch", + "account": "nudibranch", + "customer": "cuttlefish", + "entitlement": "mussel", } path = CloudChannelServiceClient.entitlement_path(**expected) @@ -21559,8 +21592,8 @@ def test_parse_entitlement_path(): def test_offer_path(): - account = "cuttlefish" - offer = "mussel" + account = "winkle" + offer = "nautilus" expected = "accounts/{account}/offers/{offer}".format( account=account, offer=offer, @@ -21571,8 +21604,8 @@ def test_offer_path(): def test_parse_offer_path(): expected = { - "account": "winkle", - "offer": "nautilus", + "account": "scallop", + "offer": "abalone", } path = CloudChannelServiceClient.offer_path(**expected) @@ -21582,7 +21615,7 @@ def test_parse_offer_path(): def test_product_path(): - product = "scallop" + product = "squid" expected = "products/{product}".format( product=product, ) @@ -21592,7 +21625,7 @@ def test_product_path(): def test_parse_product_path(): expected = { - "product": "abalone", + "product": "clam", } path = CloudChannelServiceClient.product_path(**expected) @@ -21602,8 +21635,8 @@ def test_parse_product_path(): def test_sku_path(): - product = "squid" - sku = "clam" + product = "whelk" + sku = "octopus" expected = "products/{product}/skus/{sku}".format( product=product, sku=sku, @@ -21614,8 +21647,8 @@ def test_sku_path(): def test_parse_sku_path(): expected = { - "product": "whelk", - "sku": "octopus", + "product": "oyster", + "sku": "nudibranch", } path = CloudChannelServiceClient.sku_path(**expected) @@ -21625,8 +21658,8 @@ def test_parse_sku_path(): def test_sku_group_path(): - account = "oyster" - sku_group = "nudibranch" + account = "cuttlefish" + sku_group = "mussel" expected = "accounts/{account}/skuGroups/{sku_group}".format( account=account, sku_group=sku_group, @@ -21637,8 +21670,8 @@ def test_sku_group_path(): def test_parse_sku_group_path(): expected = { - "account": "cuttlefish", - 
"sku_group": "mussel", + "account": "winkle", + "sku_group": "nautilus", } path = CloudChannelServiceClient.sku_group_path(**expected) @@ -21648,7 +21681,7 @@ def test_parse_sku_group_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -21658,7 +21691,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "abalone", } path = CloudChannelServiceClient.common_billing_account_path(**expected) @@ -21668,7 +21701,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -21678,7 +21711,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "clam", } path = CloudChannelServiceClient.common_folder_path(**expected) @@ -21688,7 +21721,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -21698,7 +21731,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "octopus", } path = CloudChannelServiceClient.common_organization_path(**expected) @@ -21708,7 +21741,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -21718,7 +21751,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "nudibranch", } path = CloudChannelServiceClient.common_project_path(**expected) @@ -21728,8 +21761,8 @@ def 
test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -21740,8 +21773,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "winkle", + "location": "nautilus", } path = CloudChannelServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py index 72e92467ae9e..788b32c21ac8 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud/__init__.py @@ -47,6 +47,9 @@ AttachedProxyConfig, AttachedServerConfig, KubernetesSecret, + Label, + SystemComponentsConfig, + Toleration, ) from google.cloud.gke_multicloud_v1.types.attached_service import ( CreateAttachedClusterRequest, @@ -200,6 +203,9 @@ "AttachedProxyConfig", "AttachedServerConfig", "KubernetesSecret", + "Label", + "SystemComponentsConfig", + "Toleration", "CreateAttachedClusterRequest", "DeleteAttachedClusterRequest", "GenerateAttachedClusterAgentTokenRequest", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py index 02144e9e929c..be5301aaa2d1 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/__init__.py @@ -35,6 +35,9 @@ AttachedProxyConfig, AttachedServerConfig, KubernetesSecret, + Label, + SystemComponentsConfig, + Toleration, ) from .types.attached_service import ( CreateAttachedClusterRequest, @@ -278,6 +281,7 @@ 
"ImportAttachedClusterRequest", "Jwk", "KubernetesSecret", + "Label", "ListAttachedClustersRequest", "ListAttachedClustersResponse", "ListAwsClustersRequest", @@ -303,6 +307,8 @@ "SecurityPostureConfig", "SpotConfig", "SurgeSettings", + "SystemComponentsConfig", + "Toleration", "UpdateAttachedClusterRequest", "UpdateAwsClusterRequest", "UpdateAwsNodePoolRequest", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py index a00f321f329c..30a6630bffec 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import warnings from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -429,6 +430,11 @@ async def sample_create_aws_cluster(): An Anthos cluster running on AWS. """ + warnings.warn( + "AwsClustersAsyncClient.create_aws_cluster is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -612,6 +618,11 @@ async def sample_update_aws_cluster(): An Anthos cluster running on AWS. """ + warnings.warn( + "AwsClustersAsyncClient.update_aws_cluster is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -743,6 +754,10 @@ async def sample_get_aws_cluster(): google.cloud.gke_multicloud_v1.types.AwsCluster: An Anthos cluster running on AWS. 
""" + warnings.warn( + "AwsClustersAsyncClient.get_aws_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -868,6 +883,10 @@ async def sample_list_aws_clusters(): resolve additional pages automatically. """ + warnings.warn( + "AwsClustersAsyncClient.list_aws_clusters is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1022,6 +1041,11 @@ async def sample_delete_aws_cluster(): } """ + warnings.warn( + "AwsClustersAsyncClient.delete_aws_cluster is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1135,6 +1159,11 @@ async def sample_generate_aws_cluster_agent_token(): google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenResponse: """ + warnings.warn( + "AwsClustersAsyncClient.generate_aws_cluster_agent_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1226,6 +1255,11 @@ async def sample_generate_aws_access_token(): method. """ + warnings.warn( + "AwsClustersAsyncClient.generate_aws_access_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1380,6 +1414,11 @@ async def sample_create_aws_node_pool(): An Anthos node pool running on AWS. 
""" + warnings.warn( + "AwsClustersAsyncClient.create_aws_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1562,6 +1601,11 @@ async def sample_update_aws_node_pool(): An Anthos node pool running on AWS. """ + warnings.warn( + "AwsClustersAsyncClient.update_aws_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1707,6 +1751,11 @@ async def sample_rollback_aws_node_pool_update(): An Anthos node pool running on AWS. """ + warnings.warn( + "AwsClustersAsyncClient.rollback_aws_node_pool_update is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1834,6 +1883,10 @@ async def sample_get_aws_node_pool(): google.cloud.gke_multicloud_v1.types.AwsNodePool: An Anthos node pool running on AWS. """ + warnings.warn( + "AwsClustersAsyncClient.get_aws_node_pool is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1961,6 +2014,11 @@ async def sample_list_aws_node_pools(): resolve additional pages automatically. """ + warnings.warn( + "AwsClustersAsyncClient.list_aws_node_pools is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
@@ -2111,6 +2169,11 @@ async def sample_delete_aws_node_pool(): } """ + warnings.warn( + "AwsClustersAsyncClient.delete_aws_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2229,6 +2292,11 @@ async def sample_get_aws_open_id_config(): details. """ + warnings.warn( + "AwsClustersAsyncClient.get_aws_open_id_config is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2321,6 +2389,11 @@ async def sample_get_aws_json_web_keys(): Key Set as specififed in RFC 7517. """ + warnings.warn( + "AwsClustersAsyncClient.get_aws_json_web_keys is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2427,6 +2500,11 @@ async def sample_get_aws_server_config(): of GKE cluster on AWS. """ + warnings.warn( + "AwsClustersAsyncClient.get_aws_server_config is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py index 64f431495872..ea3069c07121 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/aws_clusters/client.py @@ -891,6 +891,10 @@ def sample_create_aws_cluster(): An Anthos cluster running on AWS. """ + warnings.warn( + "AwsClustersClient.create_aws_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1071,6 +1075,10 @@ def sample_update_aws_cluster(): An Anthos cluster running on AWS. """ + warnings.warn( + "AwsClustersClient.update_aws_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1199,6 +1207,10 @@ def sample_get_aws_cluster(): google.cloud.gke_multicloud_v1.types.AwsCluster: An Anthos cluster running on AWS. """ + warnings.warn( + "AwsClustersClient.get_aws_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1321,6 +1333,10 @@ def sample_list_aws_clusters(): resolve additional pages automatically. """ + warnings.warn( + "AwsClustersClient.list_aws_clusters is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
@@ -1472,6 +1488,10 @@ def sample_delete_aws_cluster(): } """ + warnings.warn( + "AwsClustersClient.delete_aws_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1582,6 +1602,11 @@ def sample_generate_aws_cluster_agent_token(): google.cloud.gke_multicloud_v1.types.GenerateAwsClusterAgentTokenResponse: """ + warnings.warn( + "AwsClustersClient.generate_aws_cluster_agent_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1673,6 +1698,11 @@ def sample_generate_aws_access_token(): method. """ + warnings.warn( + "AwsClustersClient.generate_aws_access_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1827,6 +1857,10 @@ def sample_create_aws_node_pool(): An Anthos node pool running on AWS. """ + warnings.warn( + "AwsClustersClient.create_aws_node_pool is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2006,6 +2040,10 @@ def sample_update_aws_node_pool(): An Anthos node pool running on AWS. """ + warnings.warn( + "AwsClustersClient.update_aws_node_pool is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2148,6 +2186,11 @@ def sample_rollback_aws_node_pool_update(): An Anthos node pool running on AWS. 
""" + warnings.warn( + "AwsClustersClient.rollback_aws_node_pool_update is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2274,6 +2317,10 @@ def sample_get_aws_node_pool(): google.cloud.gke_multicloud_v1.types.AwsNodePool: An Anthos node pool running on AWS. """ + warnings.warn( + "AwsClustersClient.get_aws_node_pool is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2398,6 +2445,10 @@ def sample_list_aws_node_pools(): resolve additional pages automatically. """ + warnings.warn( + "AwsClustersClient.list_aws_node_pools is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2545,6 +2596,10 @@ def sample_delete_aws_node_pool(): } """ + warnings.warn( + "AwsClustersClient.delete_aws_node_pool is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2660,6 +2715,10 @@ def sample_get_aws_open_id_config(): details. """ + warnings.warn( + "AwsClustersClient.get_aws_open_id_config is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2750,6 +2809,10 @@ def sample_get_aws_json_web_keys(): Key Set as specififed in RFC 7517. """ + warnings.warn( + "AwsClustersClient.get_aws_json_web_keys is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. 
# - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2854,6 +2917,10 @@ def sample_get_aws_server_config(): of GKE cluster on AWS. """ + warnings.warn( + "AwsClustersClient.get_aws_server_config is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py index 24ae5bf4cd9b..ac15fff79398 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import warnings from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -449,6 +450,11 @@ async def sample_create_azure_client(): Active Directory Application and tenant. """ + warnings.warn( + "AzureClustersAsyncClient.create_azure_client is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -597,6 +603,11 @@ async def sample_get_azure_client(): Active Directory Application and tenant. """ + warnings.warn( + "AzureClustersAsyncClient.get_azure_client is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -724,6 +735,11 @@ async def sample_list_azure_clients(): resolve additional pages automatically. 
""" + warnings.warn( + "AzureClustersAsyncClient.list_azure_clients is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -878,6 +894,11 @@ async def sample_delete_azure_client(): } """ + warnings.warn( + "AzureClustersAsyncClient.delete_azure_client is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1055,6 +1076,11 @@ async def sample_create_azure_cluster(): An Anthos cluster running on Azure. """ + warnings.warn( + "AzureClustersAsyncClient.create_azure_cluster is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1225,6 +1251,11 @@ async def sample_update_azure_cluster(): An Anthos cluster running on Azure. """ + warnings.warn( + "AzureClustersAsyncClient.update_azure_cluster is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1357,6 +1388,11 @@ async def sample_get_azure_cluster(): google.cloud.gke_multicloud_v1.types.AzureCluster: An Anthos cluster running on Azure. """ + warnings.warn( + "AzureClustersAsyncClient.get_azure_cluster is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1484,6 +1520,11 @@ async def sample_list_azure_clusters(): resolve additional pages automatically. 
""" + warnings.warn( + "AzureClustersAsyncClient.list_azure_clusters is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1639,6 +1680,11 @@ async def sample_delete_azure_cluster(): } """ + warnings.warn( + "AzureClustersAsyncClient.delete_azure_cluster is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1752,6 +1798,11 @@ async def sample_generate_azure_cluster_agent_token(): google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenResponse: """ + warnings.warn( + "AzureClustersAsyncClient.generate_azure_cluster_agent_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1844,6 +1895,11 @@ async def sample_generate_azure_access_token(): AzureClusters.GenerateAzureAccessToken method. """ + warnings.warn( + "AzureClustersAsyncClient.generate_azure_access_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -1997,6 +2053,11 @@ async def sample_create_azure_node_pool(): An Anthos node pool running on Azure. """ + warnings.warn( + "AzureClustersAsyncClient.create_azure_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2156,6 +2217,11 @@ async def sample_update_azure_node_pool(): An Anthos node pool running on Azure. 
""" + warnings.warn( + "AzureClustersAsyncClient.update_azure_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2287,6 +2353,11 @@ async def sample_get_azure_node_pool(): google.cloud.gke_multicloud_v1.types.AzureNodePool: An Anthos node pool running on Azure. """ + warnings.warn( + "AzureClustersAsyncClient.get_azure_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2414,6 +2485,11 @@ async def sample_list_azure_node_pools(): resolve additional pages automatically. """ + warnings.warn( + "AzureClustersAsyncClient.list_azure_node_pools is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2564,6 +2640,11 @@ async def sample_delete_azure_node_pool(): } """ + warnings.warn( + "AzureClustersAsyncClient.delete_azure_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2695,6 +2776,11 @@ async def sample_get_azure_open_id_config(): specification for details. """ + warnings.warn( + "AzureClustersAsyncClient.get_azure_open_id_config is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2813,6 +2899,11 @@ async def sample_get_azure_json_web_keys(): Key Set as specififed in RFC 7517. 
""" + warnings.warn( + "AzureClustersAsyncClient.get_azure_json_web_keys is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2940,6 +3031,11 @@ async def sample_get_azure_server_config(): regions and Kubernetes versions. """ + warnings.warn( + "AzureClustersAsyncClient.get_azure_server_config is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py index c08046a3b3b4..d6c62279858b 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/services/azure_clusters/client.py @@ -922,6 +922,10 @@ def sample_create_azure_client(): Active Directory Application and tenant. """ + warnings.warn( + "AzureClustersClient.create_azure_client is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1067,6 +1071,10 @@ def sample_get_azure_client(): Active Directory Application and tenant. """ + warnings.warn( + "AzureClustersClient.get_azure_client is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1191,6 +1199,10 @@ def sample_list_azure_clients(): resolve additional pages automatically. 
""" + warnings.warn( + "AzureClustersClient.list_azure_clients is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1342,6 +1354,10 @@ def sample_delete_azure_client(): } """ + warnings.warn( + "AzureClustersClient.delete_azure_client is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1516,6 +1532,10 @@ def sample_create_azure_cluster(): An Anthos cluster running on Azure. """ + warnings.warn( + "AzureClustersClient.create_azure_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1683,6 +1703,10 @@ def sample_update_azure_cluster(): An Anthos cluster running on Azure. """ + warnings.warn( + "AzureClustersClient.update_azure_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1812,6 +1836,10 @@ def sample_get_azure_cluster(): google.cloud.gke_multicloud_v1.types.AzureCluster: An Anthos cluster running on Azure. """ + warnings.warn( + "AzureClustersClient.get_azure_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -1936,6 +1964,10 @@ def sample_list_azure_clusters(): resolve additional pages automatically. 
""" + warnings.warn( + "AzureClustersClient.list_azure_clusters is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2088,6 +2120,10 @@ def sample_delete_azure_cluster(): } """ + warnings.warn( + "AzureClustersClient.delete_azure_cluster is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2198,6 +2234,11 @@ def sample_generate_azure_cluster_agent_token(): google.cloud.gke_multicloud_v1.types.GenerateAzureClusterAgentTokenResponse: """ + warnings.warn( + "AzureClustersClient.generate_azure_cluster_agent_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2290,6 +2331,11 @@ def sample_generate_azure_access_token(): AzureClusters.GenerateAzureAccessToken method. """ + warnings.warn( + "AzureClustersClient.generate_azure_access_token is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. @@ -2443,6 +2489,11 @@ def sample_create_azure_node_pool(): An Anthos node pool running on Azure. """ + warnings.warn( + "AzureClustersClient.create_azure_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2599,6 +2650,11 @@ def sample_update_azure_node_pool(): An Anthos node pool running on Azure. 
""" + warnings.warn( + "AzureClustersClient.update_azure_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2727,6 +2783,10 @@ def sample_get_azure_node_pool(): google.cloud.gke_multicloud_v1.types.AzureNodePool: An Anthos node pool running on Azure. """ + warnings.warn( + "AzureClustersClient.get_azure_node_pool is deprecated", DeprecationWarning + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2851,6 +2911,11 @@ def sample_list_azure_node_pools(): resolve additional pages automatically. """ + warnings.warn( + "AzureClustersClient.list_azure_node_pools is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -2998,6 +3063,11 @@ def sample_delete_azure_node_pool(): } """ + warnings.warn( + "AzureClustersClient.delete_azure_node_pool is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -3126,6 +3196,11 @@ def sample_get_azure_open_id_config(): specification for details. """ + warnings.warn( + "AzureClustersClient.get_azure_open_id_config is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -3241,6 +3316,11 @@ def sample_get_azure_json_web_keys(): Key Set as specififed in RFC 7517. 
""" + warnings.warn( + "AzureClustersClient.get_azure_json_web_keys is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. @@ -3365,6 +3445,11 @@ def sample_get_azure_server_config(): regions and Kubernetes versions. """ + warnings.warn( + "AzureClustersClient.get_azure_server_config is deprecated", + DeprecationWarning, + ) + # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py index a46a98ec7c1e..e4640dabf4a5 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/__init__.py @@ -24,6 +24,9 @@ AttachedProxyConfig, AttachedServerConfig, KubernetesSecret, + Label, + SystemComponentsConfig, + Toleration, ) from .attached_service import ( CreateAttachedClusterRequest, @@ -171,6 +174,9 @@ "AttachedProxyConfig", "AttachedServerConfig", "KubernetesSecret", + "Label", + "SystemComponentsConfig", + "Toleration", "CreateAttachedClusterRequest", "DeleteAttachedClusterRequest", "GenerateAttachedClusterAgentTokenRequest", diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py index 67503c16c80b..2bcbaeaa5b1a 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/attached_resources.py @@ -18,6 +18,7 @@ from typing import 
MutableMapping, MutableSequence from google.protobuf import timestamp_pb2 # type: ignore +from google.type import date_pb2 # type: ignore import proto # type: ignore from google.cloud.gke_multicloud_v1.types import common_resources @@ -35,6 +36,9 @@ "AttachedClusterError", "AttachedProxyConfig", "KubernetesSecret", + "SystemComponentsConfig", + "Toleration", + "Label", }, ) @@ -138,21 +142,25 @@ class AttachedCluster(proto.Message): Optional. Security Posture configuration for this cluster. tags (MutableMapping[str, str]): - Optional. Input only. Tag keys/values directly bound to this - resource. - - Tag key must be specified in the format / where the tag - namespace is the ID of the organization or name of the - project that the tag key is defined in. The short name of a - tag key or value can have a maximum length of 256 - characters. The permitted character set for the short name - includes UTF-8 encoded Unicode characters except single - quotes ('), double quotes ("), backslashes (), and forward - slashes (/). + Optional. Input only. Tag keys and values directly bound to + this resource. + + The tag key must be specified in the format + ``/,`` where the tag namespace + is the ID of the organization or name of the project that + the tag key is defined in. The short name of a tag key or + value can have a maximum length of 256 characters. The + permitted character set for the short name includes UTF-8 + encoded Unicode characters except single quotation marks + (``'``), double quotation marks (``"``), backslashes + (``\``), and forward slashes (``/``). See `Tags `__ for more details on Google Cloud Platform tags. + system_components_config (google.cloud.gke_multicloud_v1.types.SystemComponentsConfig): + Optional. Kubernetes configurations for + auto-installed components on the cluster. 
""" class State(proto.Enum): @@ -301,6 +309,11 @@ class State(proto.Enum): proto.STRING, number=27, ) + system_components_config: "SystemComponentsConfig" = proto.Field( + proto.MESSAGE, + number=28, + message="SystemComponentsConfig", + ) class AttachedClustersAuthorization(proto.Message): @@ -438,12 +451,48 @@ class AttachedPlatformVersionInfo(proto.Message): Attributes: version (str): Platform version name. + enabled (bool): + Optional. True if the version is available + for attachedcluster creation. If a version is + enabled, it can be used to attach new clusters. + end_of_life (bool): + Optional. True if this cluster version + belongs to a minor version that has reached its + end of life and is no longer in scope to receive + security and bug fixes. + end_of_life_date (google.type.date_pb2.Date): + Optional. The estimated date (in Pacific Time) when this + cluster version will reach its end of life. Or if this + version is no longer supported (the ``end_of_life`` field is + true), this is the actual date (in Pacific time) when the + version reached its end of life. + release_date (google.type.date_pb2.Date): + Optional. The date (in Pacific Time) when the + cluster version was released. """ version: str = proto.Field( proto.STRING, number=1, ) + enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + end_of_life: bool = proto.Field( + proto.BOOL, + number=4, + ) + end_of_life_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=5, + message=date_pb2.Date, + ) + release_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=6, + message=date_pb2.Date, + ) class AttachedClusterError(proto.Message): @@ -504,4 +553,123 @@ class KubernetesSecret(proto.Message): ) +class SystemComponentsConfig(proto.Message): + r"""SystemComponentsConfig defines the fields for customizing + configurations for auto-installed components. 
+ + Attributes: + tolerations (MutableSequence[google.cloud.gke_multicloud_v1.types.Toleration]): + Sets custom tolerations for pods created by + auto-installed components. + labels (MutableSequence[google.cloud.gke_multicloud_v1.types.Label]): + Sets custom labels for pods created by + auto-installed components. + """ + + tolerations: MutableSequence["Toleration"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Toleration", + ) + labels: MutableSequence["Label"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="Label", + ) + + +class Toleration(proto.Message): + r"""Toleration defines the fields for tolerations for pods + created by auto-installed components. + + Attributes: + key (str): + Key is the taint key that the toleration + applies to. + value (str): + Value is the taint value that the toleration + applies to. + key_operator (google.cloud.gke_multicloud_v1.types.Toleration.KeyOperator): + KeyOperator represents a key's relationship + to the value e.g. 'Exist'. + effect (google.cloud.gke_multicloud_v1.types.Toleration.Effect): + Effect indicates the taint effect to match + e.g. 'NoSchedule' + """ + + class KeyOperator(proto.Enum): + r"""KeyOperator represents a key's relationship to the value e.g. + 'Equal'. + + Values: + KEY_OPERATOR_UNSPECIFIED (0): + Operator is not specified. + KEY_OPERATOR_EQUAL (1): + Operator maps to 'Equal'. + KEY_OPERATOR_EXISTS (2): + Operator maps to 'Exists'. + """ + KEY_OPERATOR_UNSPECIFIED = 0 + KEY_OPERATOR_EQUAL = 1 + KEY_OPERATOR_EXISTS = 2 + + class Effect(proto.Enum): + r"""Effect indicates the taint effect to match e.g. 'NoSchedule'. + + Values: + EFFECT_UNSPECIFIED (0): + Effect is not specified. + EFFECT_NO_SCHEDULE (1): + Effect maps to 'NoSchedule'. + EFFECT_PREFER_NO_SCHEDULE (2): + Effect maps to 'PreferNoSchedule'. + EFFECT_NO_EXECUTE (3): + Effect maps to 'NoExecute'. 
+ """ + EFFECT_UNSPECIFIED = 0 + EFFECT_NO_SCHEDULE = 1 + EFFECT_PREFER_NO_SCHEDULE = 2 + EFFECT_NO_EXECUTE = 3 + + key: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + key_operator: KeyOperator = proto.Field( + proto.ENUM, + number=3, + enum=KeyOperator, + ) + effect: Effect = proto.Field( + proto.ENUM, + number=4, + enum=Effect, + ) + + +class Label(proto.Message): + r"""Label defines the additional fields for labels for pods + created by auto-installed components. + + Attributes: + key (str): + This is the key of the label. + value (str): + This is the value of the label. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py index 90c90ffb8870..1a8c7cca0459 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/aws_resources.py @@ -559,7 +559,7 @@ class AwsVolumeTemplate(proto.Message): Optional. The throughput that the volume supports, in MiB/s. Only valid if volume_type is GP3. - If the volume_type is GP3 and this is not speficied, it + If the volume_type is GP3 and this is not specified, it defaults to 125. kms_key_arn (str): Optional. 
The Amazon Resource Name (ARN) of diff --git a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py index 245c219f0bfb..372891d7ae57 100644 --- a/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py +++ b/packages/google-cloud-gke-multicloud/google/cloud/gke_multicloud_v1/types/common_resources.py @@ -171,8 +171,8 @@ class OperationMetadata(proto.Message): values are "create", "delete", "update" and "import". requested_cancellation (bool): - Output only. Identifies whether it has been requested - cancellation for the operation. Operations that have + Output only. Identifies whether cancellation has been + requested for the operation. Operations that have successfully been cancelled have [google.longrunning.Operation.error][google.longrunning.Operation.error] value with a diff --git a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py index c6ed19ebc603..ec4278d85d82 100644 --- a/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py +++ b/packages/google-cloud-gke-multicloud/tests/unit/gapic/gke_multicloud_v1/test_attached_clusters.py @@ -7066,6 +7066,17 @@ def test_create_attached_cluster_rest_call_success(request_type): "binary_authorization": {"evaluation_mode": 1}, "security_posture_config": {"vulnerability_mode": 1}, "tags": {}, + "system_components_config": { + "tolerations": [ + { + "key": "key_value", + "value": "value_value", + "key_operator": 1, + "effect": 1, + } + ], + "labels": [{"key": "key_value", "value": "value_value"}], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -7305,6 +7316,17 @@ def test_update_attached_cluster_rest_call_success(request_type): "binary_authorization": {"evaluation_mode": 1}, "security_posture_config": {"vulnerability_mode": 1}, "tags": {}, + "system_components_config": { + "tolerations": [ + { + "key": "key_value", + "value": "value_value", + "key_operator": 1, + "effect": 1, + } + ], + "labels": [{"key": "key_value", "value": "value_value"}], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-network-security/docs/network_security_v1beta1/dns_threat_detector_service.rst b/packages/google-cloud-network-security/docs/network_security_v1beta1/dns_threat_detector_service.rst new file mode 100644 index 000000000000..a8a10e1fb56c --- /dev/null +++ b/packages/google-cloud-network-security/docs/network_security_v1beta1/dns_threat_detector_service.rst @@ -0,0 +1,10 @@ +DnsThreatDetectorService +------------------------------------------ + +.. automodule:: google.cloud.network_security_v1beta1.services.dns_threat_detector_service + :members: + :inherited-members: + +.. automodule:: google.cloud.network_security_v1beta1.services.dns_threat_detector_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-security/docs/network_security_v1beta1/services_.rst b/packages/google-cloud-network-security/docs/network_security_v1beta1/services_.rst index dd4dc6164423..435967cc366b 100644 --- a/packages/google-cloud-network-security/docs/network_security_v1beta1/services_.rst +++ b/packages/google-cloud-network-security/docs/network_security_v1beta1/services_.rst @@ -3,4 +3,5 @@ Services for Google Cloud Network Security v1beta1 API .. 
toctree:: :maxdepth: 2 + dns_threat_detector_service network_security diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/__init__.py index 6a5c323ada9f..f43966bf6b85 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/__init__.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/__init__.py @@ -18,6 +18,10 @@ __version__ = package_version.__version__ +from .services.dns_threat_detector_service import ( + DnsThreatDetectorServiceAsyncClient, + DnsThreatDetectorServiceClient, +) from .services.network_security import NetworkSecurityAsyncClient, NetworkSecurityClient from .types.authorization_policy import ( AuthorizationPolicy, @@ -38,6 +42,15 @@ UpdateClientTlsPolicyRequest, ) from .types.common import OperationMetadata +from .types.dns_threat_detector import ( + CreateDnsThreatDetectorRequest, + DeleteDnsThreatDetectorRequest, + DnsThreatDetector, + GetDnsThreatDetectorRequest, + ListDnsThreatDetectorsRequest, + ListDnsThreatDetectorsResponse, + UpdateDnsThreatDetectorRequest, +) from .types.server_tls_policy import ( CreateServerTlsPolicyRequest, DeleteServerTlsPolicyRequest, @@ -55,6 +68,7 @@ ) __all__ = ( + "DnsThreatDetectorServiceAsyncClient", "NetworkSecurityAsyncClient", "AuthorizationPolicy", "CertificateProvider", @@ -62,18 +76,25 @@ "ClientTlsPolicy", "CreateAuthorizationPolicyRequest", "CreateClientTlsPolicyRequest", + "CreateDnsThreatDetectorRequest", "CreateServerTlsPolicyRequest", "DeleteAuthorizationPolicyRequest", "DeleteClientTlsPolicyRequest", + "DeleteDnsThreatDetectorRequest", "DeleteServerTlsPolicyRequest", + "DnsThreatDetector", + "DnsThreatDetectorServiceClient", "GetAuthorizationPolicyRequest", "GetClientTlsPolicyRequest", + "GetDnsThreatDetectorRequest", "GetServerTlsPolicyRequest", "GrpcEndpoint", "ListAuthorizationPoliciesRequest", 
"ListAuthorizationPoliciesResponse", "ListClientTlsPoliciesRequest", "ListClientTlsPoliciesResponse", + "ListDnsThreatDetectorsRequest", + "ListDnsThreatDetectorsResponse", "ListServerTlsPoliciesRequest", "ListServerTlsPoliciesResponse", "NetworkSecurityClient", @@ -81,6 +102,7 @@ "ServerTlsPolicy", "UpdateAuthorizationPolicyRequest", "UpdateClientTlsPolicyRequest", + "UpdateDnsThreatDetectorRequest", "UpdateServerTlsPolicyRequest", "ValidationCA", ) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_metadata.json b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_metadata.json index 9b081e363ca2..9ad1fe1517f5 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_metadata.json +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/gapic_metadata.json @@ -5,6 +5,100 @@ "protoPackage": "google.cloud.networksecurity.v1beta1", "schema": "1.0", "services": { + "DnsThreatDetectorService": { + "clients": { + "grpc": { + "libraryClient": "DnsThreatDetectorServiceClient", + "rpcs": { + "CreateDnsThreatDetector": { + "methods": [ + "create_dns_threat_detector" + ] + }, + "DeleteDnsThreatDetector": { + "methods": [ + "delete_dns_threat_detector" + ] + }, + "GetDnsThreatDetector": { + "methods": [ + "get_dns_threat_detector" + ] + }, + "ListDnsThreatDetectors": { + "methods": [ + "list_dns_threat_detectors" + ] + }, + "UpdateDnsThreatDetector": { + "methods": [ + "update_dns_threat_detector" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DnsThreatDetectorServiceAsyncClient", + "rpcs": { + "CreateDnsThreatDetector": { + "methods": [ + "create_dns_threat_detector" + ] + }, + "DeleteDnsThreatDetector": { + "methods": [ + "delete_dns_threat_detector" + ] + }, + "GetDnsThreatDetector": { + "methods": [ + "get_dns_threat_detector" + ] + }, + "ListDnsThreatDetectors": { + "methods": [ + "list_dns_threat_detectors" + ] + }, + 
"UpdateDnsThreatDetector": { + "methods": [ + "update_dns_threat_detector" + ] + } + } + }, + "rest": { + "libraryClient": "DnsThreatDetectorServiceClient", + "rpcs": { + "CreateDnsThreatDetector": { + "methods": [ + "create_dns_threat_detector" + ] + }, + "DeleteDnsThreatDetector": { + "methods": [ + "delete_dns_threat_detector" + ] + }, + "GetDnsThreatDetector": { + "methods": [ + "get_dns_threat_detector" + ] + }, + "ListDnsThreatDetectors": { + "methods": [ + "list_dns_threat_detectors" + ] + }, + "UpdateDnsThreatDetector": { + "methods": [ + "update_dns_threat_detector" + ] + } + } + } + } + }, "NetworkSecurity": { "clients": { "grpc": { diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/__init__.py new file mode 100644 index 000000000000..89757fff72c5 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import DnsThreatDetectorServiceAsyncClient +from .client import DnsThreatDetectorServiceClient + +__all__ = ( + "DnsThreatDetectorServiceClient", + "DnsThreatDetectorServiceAsyncClient", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/async_client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/async_client.py new file mode 100644 index 000000000000..dcb64669dc24 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/async_client.py @@ -0,0 +1,1596 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1beta1.services.dns_threat_detector_service import ( + pagers, +) +from google.cloud.network_security_v1beta1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1beta1.types import dns_threat_detector + +from .client import DnsThreatDetectorServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport +from .transports.grpc_asyncio import DnsThreatDetectorServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + 
+class DnsThreatDetectorServiceAsyncClient: + """The Network Security API for DNS Threat Detectors.""" + + _client: DnsThreatDetectorServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DnsThreatDetectorServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + + dns_threat_detector_path = staticmethod( + DnsThreatDetectorServiceClient.dns_threat_detector_path + ) + parse_dns_threat_detector_path = staticmethod( + DnsThreatDetectorServiceClient.parse_dns_threat_detector_path + ) + network_path = staticmethod(DnsThreatDetectorServiceClient.network_path) + parse_network_path = staticmethod(DnsThreatDetectorServiceClient.parse_network_path) + common_billing_account_path = staticmethod( + DnsThreatDetectorServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DnsThreatDetectorServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DnsThreatDetectorServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + DnsThreatDetectorServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + DnsThreatDetectorServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + DnsThreatDetectorServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + 
DnsThreatDetectorServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DnsThreatDetectorServiceAsyncClient: The constructed client. + """ + return DnsThreatDetectorServiceClient.from_service_account_info.__func__(DnsThreatDetectorServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DnsThreatDetectorServiceAsyncClient: The constructed client. + """ + return DnsThreatDetectorServiceClient.from_service_account_file.__func__(DnsThreatDetectorServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DnsThreatDetectorServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DnsThreatDetectorServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DnsThreatDetectorServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = DnsThreatDetectorServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + DnsThreatDetectorServiceTransport, + Callable[..., DnsThreatDetectorServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dns threat detector service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DnsThreatDetectorServiceTransport,Callable[..., DnsThreatDetectorServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DnsThreatDetectorServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DnsThreatDetectorServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "credentialsType": None, + }, + ) + + async def 
list_dns_threat_detectors( + self, + request: Optional[ + Union[dns_threat_detector.ListDnsThreatDetectorsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDnsThreatDetectorsAsyncPager: + r"""Lists DnsThreatDetectors in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + async def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1beta1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsRequest, dict]]): + The request object. The message for requesting a list of + DnsThreatDetectors in the project. + parent (:class:`str`): + Required. The parent value for + ``ListDnsThreatDetectorsRequest``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsAsyncPager: + The response message to requesting a + list of DnsThreatDetectors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.ListDnsThreatDetectorsRequest): + request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_dns_threat_detectors + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDnsThreatDetectorsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.GetDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.DnsThreatDetector: + r"""Gets the details of a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + async def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1beta1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1beta1.types.GetDnsThreatDetectorRequest, dict]]): + The request object. 
The message sent to get a + DnsThreatDetector. + name (:class:`str`): + Required. Name of the + DnsThreatDetector resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.GetDnsThreatDetectorRequest): + request = dns_threat_detector.GetDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + dns_threat_detector_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Creates a new DnsThreatDetector in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + async def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1beta1.types.CreateDnsThreatDetectorRequest, dict]]): + The request object. The message to create a + DnsThreatDetector. + parent (:class:`str`): + Required. The value for the parent of + the DnsThreatDetector resource. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector (:class:`google.cloud.network_security_v1beta1.types.DnsThreatDetector`): + Required. The ``DnsThreatDetector`` resource to create. + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector_id (:class:`str`): + Optional. The ID of the requesting + DnsThreatDetector object. If this field + is not supplied, the service generates + an identifier. + + This corresponds to the ``dns_threat_detector_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, dns_threat_detector, dns_threat_detector_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_dns_threat_detector.CreateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if dns_threat_detector_id is not None: + request.dns_threat_detector_id = dns_threat_detector_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, dict] + ] = None, + *, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Updates a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + async def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_security_v1beta1.types.UpdateDnsThreatDetectorRequest, dict]]): + The request object. The message for updating a + DnsThreatDetector. + dns_threat_detector (:class:`google.cloud.network_security_v1beta1.types.DnsThreatDetector`): + Required. The DnsThreatDetector + resource being updated. + + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The field mask is used to specify the fields + to be overwritten in the DnsThreatDetector resource by + the update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the mask is + not provided then all fields present in the request will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [dns_threat_detector, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dns_threat_detector.name", request.dns_threat_detector.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.DeleteDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + async def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1beta1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + await client.delete_dns_threat_detector(request=request) + + Args: + request (Optional[Union[google.cloud.network_security_v1beta1.types.DeleteDnsThreatDetectorRequest, dict]]): + The request object. The message for deleting a + DnsThreatDetector. + name (:class:`str`): + Required. Name of the + DnsThreatDetector resource. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.DeleteDnsThreatDetectorRequest): + request = dns_threat_detector.DeleteDnsThreatDetectorRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide <https://cloud.google.com/iam/docs>`__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "DnsThreatDetectorServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("DnsThreatDetectorServiceAsyncClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/client.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/client.py new file mode 100644 index 000000000000..e6895db95e51 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/client.py @@ -0,0 +1,2056 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_security_v1beta1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1beta1.services.dns_threat_detector_service import ( + pagers, +) +from google.cloud.network_security_v1beta1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from 
google.cloud.network_security_v1beta1.types import dns_threat_detector + +from .transports.base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport +from .transports.grpc import DnsThreatDetectorServiceGrpcTransport +from .transports.grpc_asyncio import DnsThreatDetectorServiceGrpcAsyncIOTransport +from .transports.rest import DnsThreatDetectorServiceRestTransport + + +class DnsThreatDetectorServiceClientMeta(type): + """Metaclass for the DnsThreatDetectorService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DnsThreatDetectorServiceTransport]] + _transport_registry["grpc"] = DnsThreatDetectorServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DnsThreatDetectorServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DnsThreatDetectorServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DnsThreatDetectorServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DnsThreatDetectorServiceClient(metaclass=DnsThreatDetectorServiceClientMeta): + """The Network Security API for DNS Threat Detectors.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
+ Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "networksecurity.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "networksecurity.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DnsThreatDetectorServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DnsThreatDetectorServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DnsThreatDetectorServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DnsThreatDetectorServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def dns_threat_detector_path( + project: str, + location: str, + dns_threat_detector: str, + ) -> str: + """Returns a fully-qualified dns_threat_detector string.""" + return "projects/{project}/locations/{location}/dnsThreatDetectors/{dns_threat_detector}".format( + project=project, + location=location, + dns_threat_detector=dns_threat_detector, + ) + + @staticmethod + def parse_dns_threat_detector_path(path: str) -> Dict[str, str]: + """Parses a dns_threat_detector path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dnsThreatDetectors/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def network_path( + project: str, + network: str, + ) -> str: + """Returns a fully-qualified network string.""" + return "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + + @staticmethod + def parse_network_path(path: str) -> Dict[str, str]: + """Parses a network path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/global/networks/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component 
segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. 
Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". 
+ + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + DnsThreatDetectorServiceTransport, + Callable[..., DnsThreatDetectorServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dns threat detector service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,DnsThreatDetectorServiceTransport,Callable[..., DnsThreatDetectorServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DnsThreatDetectorServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = DnsThreatDetectorServiceClient._read_environment_variables() + self._client_cert_source = ( + DnsThreatDetectorServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = DnsThreatDetectorServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DnsThreatDetectorServiceTransport) + if transport_provided: + # transport is a DnsThreatDetectorServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." 
+ ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(DnsThreatDetectorServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DnsThreatDetectorServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[DnsThreatDetectorServiceTransport], + Callable[..., DnsThreatDetectorServiceTransport], + ] = ( + DnsThreatDetectorServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DnsThreatDetectorServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient`.", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "credentialsType": None, + }, + ) + + def list_dns_threat_detectors( + self, + request: Optional[ + Union[dns_threat_detector.ListDnsThreatDetectorsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDnsThreatDetectorsPager: + r"""Lists DnsThreatDetectors in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1beta1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsRequest, dict]): + The request object. The message for requesting a list of + DnsThreatDetectors in the project. + parent (str): + Required. 
The parent value for + ``ListDnsThreatDetectorsRequest``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsPager: + The response message to requesting a + list of DnsThreatDetectors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.ListDnsThreatDetectorsRequest): + request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.list_dns_threat_detectors + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDnsThreatDetectorsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.GetDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.DnsThreatDetector: + r"""Gets the details of a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1beta1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1beta1.types.GetDnsThreatDetectorRequest, dict]): + The request object. The message sent to get a + DnsThreatDetector. + name (str): + Required. Name of the + DnsThreatDetector resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.GetDnsThreatDetectorRequest): + request = dns_threat_detector.GetDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dns_threat_detector] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + dns_threat_detector_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Creates a new DnsThreatDetector in a given project + and location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1beta1.types.CreateDnsThreatDetectorRequest, dict]): + The request object. The message to create a + DnsThreatDetector. + parent (str): + Required. The value for the parent of + the DnsThreatDetector resource. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector (google.cloud.network_security_v1beta1.types.DnsThreatDetector): + Required. The ``DnsThreatDetector`` resource to create. + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + dns_threat_detector_id (str): + Optional. The ID of the requesting + DnsThreatDetector object. If this field + is not supplied, the service generates + an identifier. 
+ + This corresponds to the ``dns_threat_detector_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, dns_threat_detector, dns_threat_detector_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_dns_threat_detector.CreateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if dns_threat_detector_id is not None: + request.dns_threat_detector_id = dns_threat_detector_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_dns_threat_detector( + self, + request: Optional[ + Union[gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, dict] + ] = None, + *, + dns_threat_detector: Optional[gcn_dns_threat_detector.DnsThreatDetector] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Updates a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_security_v1beta1.types.UpdateDnsThreatDetectorRequest, dict]): + The request object. The message for updating a + DnsThreatDetector. + dns_threat_detector (google.cloud.network_security_v1beta1.types.DnsThreatDetector): + Required. The DnsThreatDetector + resource being updated. + + This corresponds to the ``dns_threat_detector`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The field mask is used to specify the fields + to be overwritten in the DnsThreatDetector resource by + the update. The fields specified in the update_mask are + relative to the resource, not the full request. A field + will be overwritten if it is in the mask. If the mask is + not provided then all fields present in the request will + be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_security_v1beta1.types.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a provider\_ that then + analyzes the logs to identify threat events in the + DNS queries. By default, all VPC networks in your + projects are included. You can exclude specific + networks by supplying excluded_networks. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [dns_threat_detector, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest + ): + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if dns_threat_detector is not None: + request.dns_threat_detector = dns_threat_detector + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dns_threat_detector.name", request.dns_threat_detector.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dns_threat_detector( + self, + request: Optional[ + Union[dns_threat_detector.DeleteDnsThreatDetectorRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a single DnsThreatDetector. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_security_v1beta1 + + def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1beta1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + client.delete_dns_threat_detector(request=request) + + Args: + request (Union[google.cloud.network_security_v1beta1.types.DeleteDnsThreatDetectorRequest, dict]): + The request object. The message for deleting a + DnsThreatDetector. + name (str): + Required. Name of the + DnsThreatDetector resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dns_threat_detector.DeleteDnsThreatDetectorRequest): + request = dns_threat_detector.DeleteDnsThreatDetectorRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_dns_threat_detector + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self) -> "DnsThreatDetectorServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). 
+ A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("DnsThreatDetectorServiceClient",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/pagers.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/pagers.py new file mode 100644 index 000000000000..a518120c5ae5 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/pagers.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_security_v1beta1.types import dns_threat_detector + + +class ListDnsThreatDetectorsPager: + """A pager for iterating through ``list_dns_threat_detectors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``dns_threat_detectors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDnsThreatDetectors`` requests and continue to iterate + through the ``dns_threat_detectors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., dns_threat_detector.ListDnsThreatDetectorsResponse], + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + response: dns_threat_detector.ListDnsThreatDetectorsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsRequest): + The initial request object. + response (google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dns_threat_detector.ListDnsThreatDetectorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[dns_threat_detector.DnsThreatDetector]: + for page in self.pages: + yield from page.dns_threat_detectors + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDnsThreatDetectorsAsyncPager: + """A pager for iterating through ``list_dns_threat_detectors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``dns_threat_detectors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDnsThreatDetectors`` requests and continue to iterate + through the ``dns_threat_detectors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[dns_threat_detector.ListDnsThreatDetectorsResponse] + ], + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + response: dns_threat_detector.ListDnsThreatDetectorsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsRequest): + The initial request object. + response (google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = dns_threat_detector.ListDnsThreatDetectorsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[dns_threat_detector.ListDnsThreatDetectorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[dns_threat_detector.DnsThreatDetector]: + async def async_generator(): + async for page in self.pages: + for response in page.dns_threat_detectors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/README.rst b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/README.rst new file mode 100644 index 000000000000..bccbb6f152a3 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`DnsThreatDetectorServiceTransport` is the ABC for all transports. +- public child `DnsThreatDetectorServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `DnsThreatDetectorServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). 
+- private child `_BaseDnsThreatDetectorServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `DnsThreatDetectorServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/__init__.py new file mode 100644 index 000000000000..963a0f444a76 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DnsThreatDetectorServiceTransport +from .grpc import DnsThreatDetectorServiceGrpcTransport +from .grpc_asyncio import DnsThreatDetectorServiceGrpcAsyncIOTransport +from .rest import ( + DnsThreatDetectorServiceRestInterceptor, + DnsThreatDetectorServiceRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[DnsThreatDetectorServiceTransport]] +_transport_registry["grpc"] = DnsThreatDetectorServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DnsThreatDetectorServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DnsThreatDetectorServiceRestTransport + +__all__ = ( + "DnsThreatDetectorServiceTransport", + "DnsThreatDetectorServiceGrpcTransport", + "DnsThreatDetectorServiceGrpcAsyncIOTransport", + "DnsThreatDetectorServiceRestTransport", + "DnsThreatDetectorServiceRestInterceptor", +) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/base.py new file mode 100644 index 000000000000..39faef31a5f0 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/base.py @@ -0,0 +1,371 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.network_security_v1beta1 import gapic_version as package_version +from google.cloud.network_security_v1beta1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1beta1.types import dns_threat_detector + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DnsThreatDetectorServiceTransport(abc.ABC): + """Abstract transport class for DnsThreatDetectorService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networksecurity.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_dns_threat_detectors: gapic_v1.method.wrap_method( + self.list_dns_threat_detectors, + default_timeout=None, + client_info=client_info, + ), + self.get_dns_threat_detector: gapic_v1.method.wrap_method( + self.get_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.create_dns_threat_detector: gapic_v1.method.wrap_method( + self.create_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.update_dns_threat_detector: gapic_v1.method.wrap_method( + self.update_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.delete_dns_threat_detector: gapic_v1.method.wrap_method( + self.delete_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + 
def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def list_dns_threat_detectors( + self, + ) -> Callable[ + [dns_threat_detector.ListDnsThreatDetectorsRequest], + Union[ + dns_threat_detector.ListDnsThreatDetectorsResponse, + Awaitable[dns_threat_detector.ListDnsThreatDetectorsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.GetDnsThreatDetectorRequest], + Union[ + dns_threat_detector.DnsThreatDetector, + Awaitable[dns_threat_detector.DnsThreatDetector], + ], + ]: + raise NotImplementedError() + + @property + def create_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest], + Union[ + gcn_dns_threat_detector.DnsThreatDetector, + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ], + ]: + raise NotImplementedError() + + @property + def update_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest], + Union[ + gcn_dns_threat_detector.DnsThreatDetector, + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ], + ]: + raise NotImplementedError() + + @property + def delete_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.DeleteDnsThreatDetectorRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, 
Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DnsThreatDetectorServiceTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc.py new file mode 100644 index 000000000000..2e726af62b69 --- /dev/null +++ 
b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc.py @@ -0,0 +1,678 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.network_security_v1beta1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1beta1.types import dns_threat_detector + +from .base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO 
COVER
+    CLIENT_LOGGING_SUPPORTED = False

+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    """Sync client-side interceptor that DEBUG-logs each unary-unary RPC.
+
+    Logging only fires when ``google.api_core.client_logging`` is importable
+    (CLIENT_LOGGING_SUPPORTED) and DEBUG is enabled on this module's logger,
+    so the interceptor is a no-op in normal operation.
+    """
+
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        """Log the outgoing request, run the RPC, then log the response."""
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            # Render the request as JSON when it is a proto message; fall back
+            # to a pickled repr for any other request type.
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            # Metadata values may be bytes; decode so the log record is text.
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of str -> str for
+            # the log record (None when there is no trailing metadata).
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class DnsThreatDetectorServiceGrpcTransport(DnsThreatDetectorServiceTransport):
+    """gRPC backend transport for DnsThreatDetectorService.
+
+    The Network Security API for DNS Threat Detectors.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    # Cache of RPC-name -> stub callable, populated lazily by the properties.
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "networksecurity.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'networksecurity.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests.
+                If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, grpc.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            # NOTE(review): presumably consulted by the base transport to skip
+            # default-credential lookup — confirm against base.py.
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # All stubs are created against the intercepted (logging) channel.
+        self._interceptor = _LoggingClientInterceptor()
+        self._logged_channel = grpc.intercept_channel(
+            self._grpc_channel, self._interceptor
+        )
+
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "networksecurity.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def list_dns_threat_detectors(
+        self,
+    ) -> Callable[
+        [dns_threat_detector.ListDnsThreatDetectorsRequest],
+        dns_threat_detector.ListDnsThreatDetectorsResponse,
+    ]:
+        r"""Return a callable for the list dns threat detectors method over gRPC.
+
+        Lists DnsThreatDetectors in a given project and
+        location.
+
+        Returns:
+            Callable[[~.ListDnsThreatDetectorsRequest],
+                    ~.ListDnsThreatDetectorsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        # The stub is cached in self._stubs so repeated property access
+        # reuses a single bound RPC on the logged channel.
+        if "list_dns_threat_detectors" not in self._stubs:
+            self._stubs["list_dns_threat_detectors"] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/ListDnsThreatDetectors",
+                request_serializer=dns_threat_detector.ListDnsThreatDetectorsRequest.serialize,
+                response_deserializer=dns_threat_detector.ListDnsThreatDetectorsResponse.deserialize,
+            )
+        return self._stubs["list_dns_threat_detectors"]
+
+    @property
+    def get_dns_threat_detector(
+        self,
+    ) -> Callable[
+        [dns_threat_detector.GetDnsThreatDetectorRequest],
+        dns_threat_detector.DnsThreatDetector,
+    ]:
+        r"""Return a callable for the get dns threat detector method over gRPC.
+
+        Gets the details of a single DnsThreatDetector.
+
+        Returns:
+            Callable[[~.GetDnsThreatDetectorRequest],
+                    ~.DnsThreatDetector]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_dns_threat_detector" not in self._stubs:
+            self._stubs["get_dns_threat_detector"] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/GetDnsThreatDetector",
+                request_serializer=dns_threat_detector.GetDnsThreatDetectorRequest.serialize,
+                response_deserializer=dns_threat_detector.DnsThreatDetector.deserialize,
+            )
+        return self._stubs["get_dns_threat_detector"]
+
+    @property
+    def create_dns_threat_detector(
+        self,
+    ) -> Callable[
+        [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest],
+        gcn_dns_threat_detector.DnsThreatDetector,
+    ]:
+        r"""Return a callable for the create dns threat detector method over gRPC.
+
+        Creates a new DnsThreatDetector in a given project
+        and location.
+
+        Returns:
+            Callable[[~.CreateDnsThreatDetectorRequest],
+                    ~.DnsThreatDetector]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_dns_threat_detector" not in self._stubs:
+            self._stubs[
+                "create_dns_threat_detector"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/CreateDnsThreatDetector",
+                request_serializer=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.serialize,
+                response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize,
+            )
+        return self._stubs["create_dns_threat_detector"]
+
+    @property
+    def update_dns_threat_detector(
+        self,
+    ) -> Callable[
+        [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest],
+        gcn_dns_threat_detector.DnsThreatDetector,
+    ]:
+        r"""Return a callable for the update dns threat detector method over gRPC.
+
+        Updates a single DnsThreatDetector.
+
+        Returns:
+            Callable[[~.UpdateDnsThreatDetectorRequest],
+                    ~.DnsThreatDetector]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_dns_threat_detector" not in self._stubs:
+            self._stubs[
+                "update_dns_threat_detector"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/UpdateDnsThreatDetector",
+                request_serializer=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.serialize,
+                response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize,
+            )
+        return self._stubs["update_dns_threat_detector"]
+
+    @property
+    def delete_dns_threat_detector(
+        self,
+    ) -> Callable[
+        [dns_threat_detector.DeleteDnsThreatDetectorRequest], empty_pb2.Empty
+    ]:
+        r"""Return a callable for the delete dns threat detector method over gRPC.
+
+        Deletes a single DnsThreatDetector.
+
+        Returns:
+            Callable[[~.DeleteDnsThreatDetectorRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_dns_threat_detector" not in self._stubs:
+            self._stubs[
+                "delete_dns_threat_detector"
+            ] = self._logged_channel.unary_unary(
+                "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/DeleteDnsThreatDetector",
+                request_serializer=dns_threat_detector.DeleteDnsThreatDetectorRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_dns_threat_detector"]
+
+    def close(self):
+        """Close the intercepted gRPC channel, releasing its resources."""
+        self._logged_channel.close()
+
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_operation" not in self._stubs:
+            self._stubs["delete_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/DeleteOperation",
+                request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["delete_operation"]
+
+    @property
+    def cancel_operation(
+        self,
+    ) -> Callable[[operations_pb2.CancelOperationRequest], None]:
+        r"""Return a callable for the cancel_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "cancel_operation" not in self._stubs:
+            self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/CancelOperation",
+                request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
+                response_deserializer=None,
+            )
+        return self._stubs["cancel_operation"]
+
+    @property
+    def get_operation(
+        self,
+    ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]:
+        r"""Return a callable for the get_operation method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_operation" not in self._stubs:
+            self._stubs["get_operation"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/GetOperation",
+                request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
+                response_deserializer=operations_pb2.Operation.FromString,
+            )
+        return self._stubs["get_operation"]
+
+    @property
+    def list_operations(
+        self,
+    ) -> Callable[
+        [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse
+    ]:
+        r"""Return a callable for the list_operations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self._logged_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self._logged_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        function. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+        Gets the IAM access control policy for a function.
+        Returns an empty policy if the function exists and does
+        not have a policy set.
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+        Tests the specified permissions against the IAM access control
+        policy for a function. If the function does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "test_iam_permissions" not in self._stubs:
+            self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/TestIamPermissions",
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs["test_iam_permissions"]
+
+    @property
+    def kind(self) -> str:
+        """Return the transport kind identifier ("grpc")."""
+        return "grpc"
+
+
+__all__ = ("DnsThreatDetectorServiceGrpcTransport",)
diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc_asyncio.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..3049f5cbd582
--- /dev/null
+++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/grpc_asyncio.py
@@ -0,0 +1,766 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import inspect
+import json
+import logging as std_logging
+import pickle
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+import warnings
+
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1, grpc_helpers_async
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+import proto  # type: ignore
+
+from google.cloud.network_security_v1beta1.types import (
+    dns_threat_detector as gcn_dns_threat_detector,
+)
+from google.cloud.network_security_v1beta1.types import dns_threat_detector
+
+from .base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport
+from .grpc import DnsThreatDetectorServiceGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+    grpc.aio.UnaryUnaryClientInterceptor
+):  # pragma: NO COVER
+    """Async client-side interceptor that DEBUG-logs each unary-unary RPC.
+
+    Logging only fires when ``google.api_core.client_logging`` is importable
+    (CLIENT_LOGGING_SUPPORTED) and DEBUG is enabled on this module's logger.
+    """
+
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        """Log the outgoing request, await the RPC, then log the response."""
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            # Render the request as JSON when it is a proto message; fall back
+            # to a pickled repr for any other request type.
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            # Metadata values may be bytes; decode so the log record is text.
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of str -> str for
+            # the log record (None when there is no trailing metadata).
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class DnsThreatDetectorServiceGrpcAsyncIOTransport(DnsThreatDetectorServiceTransport):
+    """gRPC AsyncIO backend transport for DnsThreatDetectorService.
+
+    The Network Security API for DNS Threat Detectors.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    # Cache of RPC-name -> stub callable, populated lazily by the properties.
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "networksecurity.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "networksecurity.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'networksecurity.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
+                service.
These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # initialize with the provided callable or the default channel
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Register the logging interceptor directly on the channel's private
+        # interceptor list (NOTE(review): presumably because aio channels have
+        # no ``intercept_channel`` helper — confirm against grpc.aio docs).
+        self._interceptor = _LoggingClientAIOInterceptor()
+        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+        self._logged_channel = self._grpc_channel
+        # Detect whether gapic_v1.method_async.wrap_method accepts ``kind``.
+        self._wrap_with_kind = (
+            "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        )
+        # Wrap messages. This must be done after self._logged_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def list_dns_threat_detectors(
+        self,
+    ) -> Callable[
+        [dns_threat_detector.ListDnsThreatDetectorsRequest],
+        Awaitable[dns_threat_detector.ListDnsThreatDetectorsResponse],
+    ]:
+        r"""Return a callable for the list dns threat detectors method over gRPC.
+
+        Lists DnsThreatDetectors in a given project and
+        location.
+
+        Returns:
+            Callable[[~.ListDnsThreatDetectorsRequest],
+                    Awaitable[~.ListDnsThreatDetectorsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "list_dns_threat_detectors" not in self._stubs: + self._stubs["list_dns_threat_detectors"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/ListDnsThreatDetectors", + request_serializer=dns_threat_detector.ListDnsThreatDetectorsRequest.serialize, + response_deserializer=dns_threat_detector.ListDnsThreatDetectorsResponse.deserialize, + ) + return self._stubs["list_dns_threat_detectors"] + + @property + def get_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.GetDnsThreatDetectorRequest], + Awaitable[dns_threat_detector.DnsThreatDetector], + ]: + r"""Return a callable for the get dns threat detector method over gRPC. + + Gets the details of a single DnsThreatDetector. + + Returns: + Callable[[~.GetDnsThreatDetectorRequest], + Awaitable[~.DnsThreatDetector]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dns_threat_detector" not in self._stubs: + self._stubs["get_dns_threat_detector"] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/GetDnsThreatDetector", + request_serializer=dns_threat_detector.GetDnsThreatDetectorRequest.serialize, + response_deserializer=dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["get_dns_threat_detector"] + + @property + def create_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest], + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ]: + r"""Return a callable for the create dns threat detector method over gRPC. + + Creates a new DnsThreatDetector in a given project + and location. 
+ + Returns: + Callable[[~.CreateDnsThreatDetectorRequest], + Awaitable[~.DnsThreatDetector]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dns_threat_detector" not in self._stubs: + self._stubs[ + "create_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/CreateDnsThreatDetector", + request_serializer=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.serialize, + response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["create_dns_threat_detector"] + + @property + def update_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest], + Awaitable[gcn_dns_threat_detector.DnsThreatDetector], + ]: + r"""Return a callable for the update dns threat detector method over gRPC. + + Updates a single DnsThreatDetector. + + Returns: + Callable[[~.UpdateDnsThreatDetectorRequest], + Awaitable[~.DnsThreatDetector]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_dns_threat_detector" not in self._stubs: + self._stubs[ + "update_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/UpdateDnsThreatDetector", + request_serializer=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.serialize, + response_deserializer=gcn_dns_threat_detector.DnsThreatDetector.deserialize, + ) + return self._stubs["update_dns_threat_detector"] + + @property + def delete_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.DeleteDnsThreatDetectorRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete dns threat detector method over gRPC. + + Deletes a single DnsThreatDetector. + + Returns: + Callable[[~.DeleteDnsThreatDetectorRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_dns_threat_detector" not in self._stubs: + self._stubs[ + "delete_dns_threat_detector" + ] = self._logged_channel.unary_unary( + "/google.cloud.networksecurity.v1beta1.DnsThreatDetectorService/DeleteDnsThreatDetector", + request_serializer=dns_threat_detector.DeleteDnsThreatDetectorRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_dns_threat_detector"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_dns_threat_detectors: self._wrap_method( + self.list_dns_threat_detectors, + default_timeout=None, + client_info=client_info, + ), + self.get_dns_threat_detector: self._wrap_method( + self.get_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.create_dns_threat_detector: self._wrap_method( + self.create_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.update_dns_threat_detector: self._wrap_method( + self.update_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.delete_dns_threat_detector: self._wrap_method( + self.delete_dns_threat_detector, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + 
default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("DnsThreatDetectorServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest.py new file mode 100644 index 000000000000..4c9f289bee49 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest.py @@ -0,0 +1,2712 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_security_v1beta1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1beta1.types import dns_threat_detector + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseDnsThreatDetectorServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = 
google.protobuf.__version__ + + +class DnsThreatDetectorServiceRestInterceptor: + """Interceptor for DnsThreatDetectorService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DnsThreatDetectorServiceRestTransport. + + .. code-block:: python + class MyCustomDnsThreatDetectorServiceInterceptor(DnsThreatDetectorServiceRestInterceptor): + def pre_create_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_dns_threat_detector(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_dns_threat_detector(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_dns_threat_detectors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_dns_threat_detectors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_dns_threat_detector(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_dns_threat_detector(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DnsThreatDetectorServiceRestTransport(interceptor=MyCustomDnsThreatDetectorServiceInterceptor()) + client = 
DnsThreatDetectorServiceClient(transport=transport) + + + """ + + def pre_create_dns_threat_detector( + self, + request: gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_create_dns_threat_detector( + self, response: gcn_dns_threat_detector.DnsThreatDetector + ) -> gcn_dns_threat_detector.DnsThreatDetector: + """Post-rpc interceptor for create_dns_threat_detector + + DEPRECATED. Please use the `post_create_dns_threat_detector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_create_dns_threat_detector` interceptor runs + before the `post_create_dns_threat_detector_with_metadata` interceptor. + """ + return response + + def post_create_dns_threat_detector_with_metadata( + self, + response: gcn_dns_threat_detector.DnsThreatDetector, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.DnsThreatDetector, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for create_dns_threat_detector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_create_dns_threat_detector_with_metadata` + interceptor in new development instead of the `post_create_dns_threat_detector` interceptor. 
+ When both interceptors are used, this `post_create_dns_threat_detector_with_metadata` interceptor runs after the + `post_create_dns_threat_detector` interceptor. The (possibly modified) response returned by + `post_create_dns_threat_detector` will be passed to + `post_create_dns_threat_detector_with_metadata`. + """ + return response, metadata + + def pre_delete_dns_threat_detector( + self, + request: dns_threat_detector.DeleteDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.DeleteDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def pre_get_dns_threat_detector( + self, + request: dns_threat_detector.GetDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.GetDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_get_dns_threat_detector( + self, response: dns_threat_detector.DnsThreatDetector + ) -> dns_threat_detector.DnsThreatDetector: + """Post-rpc interceptor for get_dns_threat_detector + + DEPRECATED. Please use the `post_get_dns_threat_detector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_get_dns_threat_detector` interceptor runs + before the `post_get_dns_threat_detector_with_metadata` interceptor. 
+ """ + return response + + def post_get_dns_threat_detector_with_metadata( + self, + response: dns_threat_detector.DnsThreatDetector, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.DnsThreatDetector, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_dns_threat_detector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_get_dns_threat_detector_with_metadata` + interceptor in new development instead of the `post_get_dns_threat_detector` interceptor. + When both interceptors are used, this `post_get_dns_threat_detector_with_metadata` interceptor runs after the + `post_get_dns_threat_detector` interceptor. The (possibly modified) response returned by + `post_get_dns_threat_detector` will be passed to + `post_get_dns_threat_detector_with_metadata`. + """ + return response, metadata + + def pre_list_dns_threat_detectors( + self, + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.ListDnsThreatDetectorsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_dns_threat_detectors + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_list_dns_threat_detectors( + self, response: dns_threat_detector.ListDnsThreatDetectorsResponse + ) -> dns_threat_detector.ListDnsThreatDetectorsResponse: + """Post-rpc interceptor for list_dns_threat_detectors + + DEPRECATED. Please use the `post_list_dns_threat_detectors_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_list_dns_threat_detectors` interceptor runs + before the `post_list_dns_threat_detectors_with_metadata` interceptor. + """ + return response + + def post_list_dns_threat_detectors_with_metadata( + self, + response: dns_threat_detector.ListDnsThreatDetectorsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dns_threat_detector.ListDnsThreatDetectorsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_dns_threat_detectors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_list_dns_threat_detectors_with_metadata` + interceptor in new development instead of the `post_list_dns_threat_detectors` interceptor. + When both interceptors are used, this `post_list_dns_threat_detectors_with_metadata` interceptor runs after the + `post_list_dns_threat_detectors` interceptor. The (possibly modified) response returned by + `post_list_dns_threat_detectors` will be passed to + `post_list_dns_threat_detectors_with_metadata`. + """ + return response, metadata + + def pre_update_dns_threat_detector( + self, + request: gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_dns_threat_detector + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. 
+ """ + return request, metadata + + def post_update_dns_threat_detector( + self, response: gcn_dns_threat_detector.DnsThreatDetector + ) -> gcn_dns_threat_detector.DnsThreatDetector: + """Post-rpc interceptor for update_dns_threat_detector + + DEPRECATED. Please use the `post_update_dns_threat_detector_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. This `post_update_dns_threat_detector` interceptor runs + before the `post_update_dns_threat_detector_with_metadata` interceptor. + """ + return response + + def post_update_dns_threat_detector_with_metadata( + self, + response: gcn_dns_threat_detector.DnsThreatDetector, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcn_dns_threat_detector.DnsThreatDetector, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for update_dns_threat_detector + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DnsThreatDetectorService server but before it is returned to user code. + + We recommend only using this `post_update_dns_threat_detector_with_metadata` + interceptor in new development instead of the `post_update_dns_threat_detector` interceptor. + When both interceptors are used, this `post_update_dns_threat_detector_with_metadata` interceptor runs after the + `post_update_dns_threat_detector` interceptor. The (possibly modified) response returned by + `post_update_dns_threat_detector` will be passed to + `post_update_dns_threat_detector_with_metadata`. 
+ """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. 
+ """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. 
+ """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DnsThreatDetectorService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DnsThreatDetectorService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DnsThreatDetectorServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DnsThreatDetectorServiceRestInterceptor + + +class DnsThreatDetectorServiceRestTransport(_BaseDnsThreatDetectorServiceRestTransport): + """REST backend synchronous transport for DnsThreatDetectorService. + + The Network Security API for DNS Threat Detectors. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DnsThreatDetectorServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DnsThreatDetectorServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.CreateDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Call the create dns threat + detector method over HTTP. + + Args: + request (~.gcn_dns_threat_detector.CreateDnsThreatDetectorRequest): + The request object. The message to create a + DnsThreatDetector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcn_dns_threat_detector.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a + *provider* that then analyzes the logs to identify + threat events in the DNS queries. By default, all VPC + networks in your projects are included. You can exclude + specific networks by supplying ``excluded_networks``. 
+ + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.CreateDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "CreateDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._CreateDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcn_dns_threat_detector.DnsThreatDetector() + pb_resp = gcn_dns_threat_detector.DnsThreatDetector.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_dns_threat_detector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_dns_threat_detector_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + gcn_dns_threat_detector.DnsThreatDetector.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.create_dns_threat_detector", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "CreateDnsThreatDetector", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.DeleteDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, 
strict=True), + ) + return response + + def __call__( + self, + request: dns_threat_detector.DeleteDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete dns threat + detector method over HTTP. + + Args: + request (~.dns_threat_detector.DeleteDnsThreatDetectorRequest): + The request object. The message for deleting a + DnsThreatDetector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.DeleteDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "DeleteDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._DeleteDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dns_threat_detector.GetDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.DnsThreatDetector: + r"""Call the get dns threat detector method over HTTP. + + Args: + request (~.dns_threat_detector.GetDnsThreatDetectorRequest): + The request object. The message sent to get a + DnsThreatDetector. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dns_threat_detector.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a + *provider* that then analyzes the logs to identify + threat events in the DNS queries. By default, all VPC + networks in your projects are included. You can exclude + specific networks by supplying ``excluded_networks``. + + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.GetDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": 
"GetDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._GetDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dns_threat_detector.DnsThreatDetector() + pb_resp = dns_threat_detector.DnsThreatDetector.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_dns_threat_detector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_dns_threat_detector_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = dns_threat_detector.DnsThreatDetector.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.get_dns_threat_detector", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "GetDnsThreatDetector", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDnsThreatDetectors( + _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.ListDnsThreatDetectors") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, 
+ timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dns_threat_detector.ListDnsThreatDetectorsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dns_threat_detector.ListDnsThreatDetectorsResponse: + r"""Call the list dns threat detectors method over HTTP. + + Args: + request (~.dns_threat_detector.ListDnsThreatDetectorsRequest): + The request object. The message for requesting a list of + DnsThreatDetectors in the project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.dns_threat_detector.ListDnsThreatDetectorsResponse: + The response message to requesting a + list of DnsThreatDetectors. 
+ + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_dns_threat_detectors( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.ListDnsThreatDetectors", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "ListDnsThreatDetectors", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._ListDnsThreatDetectors._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = dns_threat_detector.ListDnsThreatDetectorsResponse() + pb_resp = dns_threat_detector.ListDnsThreatDetectorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_dns_threat_detectors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_dns_threat_detectors_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + dns_threat_detector.ListDnsThreatDetectorsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.list_dns_threat_detectors", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "ListDnsThreatDetectors", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDnsThreatDetector( + _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.UpdateDnsThreatDetector") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcn_dns_threat_detector.DnsThreatDetector: + r"""Call the update dns threat + detector method over HTTP. + + Args: + request (~.gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest): + The request object. The message for updating a + DnsThreatDetector. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcn_dns_threat_detector.DnsThreatDetector: + A DNS threat detector sends DNS query logs to a + *provider* that then analyzes the logs to identify + threat events in the DNS queries. By default, all VPC + networks in your projects are included. You can exclude + specific networks by supplying ``excluded_networks``. 
+ + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_dns_threat_detector( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.UpdateDnsThreatDetector", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "UpdateDnsThreatDetector", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._UpdateDnsThreatDetector._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcn_dns_threat_detector.DnsThreatDetector() + pb_resp = gcn_dns_threat_detector.DnsThreatDetector.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_dns_threat_detector(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_dns_threat_detector_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + gcn_dns_threat_detector.DnsThreatDetector.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.update_dns_threat_detector", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "UpdateDnsThreatDetector", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.CreateDnsThreatDetectorRequest], + gcn_dns_threat_detector.DnsThreatDetector, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.DeleteDnsThreatDetectorRequest], empty_pb2.Empty + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_dns_threat_detector( + self, + ) -> Callable[ + [dns_threat_detector.GetDnsThreatDetectorRequest], + dns_threat_detector.DnsThreatDetector, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_dns_threat_detectors( + self, + ) -> Callable[ + [dns_threat_detector.ListDnsThreatDetectorsRequest], + dns_threat_detector.ListDnsThreatDetectorsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDnsThreatDetectors(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_dns_threat_detector( + self, + ) -> Callable[ + [gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest], + gcn_dns_threat_detector.DnsThreatDetector, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDnsThreatDetector(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DnsThreatDetectorServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), 
+ ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # 
Send the request + response = ( + DnsThreatDetectorServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": 
"SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] 
= None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": 
"CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations, + DnsThreatDetectorServiceRestStub, + ): + def __hash__(self): + return hash("DnsThreatDetectorServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDnsThreatDetectorServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": 
http_request["headers"], + }, + ) + + # Send the request + response = ( + DnsThreatDetectorServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networksecurity_v1beta1.DnsThreatDetectorServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DnsThreatDetectorServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest_base.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest_base.py new file mode 100644 index 000000000000..10523101b2c5 --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/services/dns_threat_detector_service/transports/rest_base.py @@ -0,0 +1,629 @@ +# -*- coding: utf-8 -*- 
+# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_security_v1beta1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1beta1.types import dns_threat_detector + +from .base import DEFAULT_CLIENT_INFO, DnsThreatDetectorServiceTransport + + +class _BaseDnsThreatDetectorServiceRestTransport(DnsThreatDetectorServiceTransport): + """Base REST backend transport for DnsThreatDetectorService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networksecurity.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'networksecurity.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{parent=projects/*/locations/*}/dnsThreatDetectors", + "body": "dns_threat_detector", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseCreateDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseDeleteDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta1/{name=projects/*/locations/*/dnsThreatDetectors/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dns_threat_detector.DeleteDnsThreatDetectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseDeleteDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/dnsThreatDetectors/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
dns_threat_detector.GetDnsThreatDetectorRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseGetDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDnsThreatDetectors: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{parent=projects/*/locations/*}/dnsThreatDetectors", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dns_threat_detector.ListDnsThreatDetectorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseListDnsThreatDetectors._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDnsThreatDetector: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1beta1/{dns_threat_detector.name=projects/*/locations/*/dnsThreatDetectors/*}", + "body": "dns_threat_detector", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDnsThreatDetectorServiceRestTransport._BaseUpdateDnsThreatDetector._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{resource=projects/*/locations/*/authorizationPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1beta1/{resource=projects/*/locations/*/serverTlsPolicies/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1beta1/{resource=projects/*/locations/*/clientTlsPolicies/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
@staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{resource=projects/*/locations/*/authorizationPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta1/{resource=projects/*/locations/*/serverTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta1/{resource=projects/*/locations/*/clientTlsPolicies/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{resource=projects/*/locations/*/authorizationPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta1/{resource=projects/*/locations/*/serverTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1beta1/{resource=projects/*/locations/*/clientTlsPolicies/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def 
_get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta1/{name=projects/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{name=projects/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseDnsThreatDetectorServiceRestTransport",) diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/__init__.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/__init__.py index d2d984499bf2..2abd2306932d 100644 --- a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/__init__.py +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/__init__.py @@ -32,6 +32,15 @@ UpdateClientTlsPolicyRequest, ) from .common import OperationMetadata +from 
.dns_threat_detector import ( + CreateDnsThreatDetectorRequest, + DeleteDnsThreatDetectorRequest, + DnsThreatDetector, + GetDnsThreatDetectorRequest, + ListDnsThreatDetectorsRequest, + ListDnsThreatDetectorsResponse, + UpdateDnsThreatDetectorRequest, +) from .server_tls_policy import ( CreateServerTlsPolicyRequest, DeleteServerTlsPolicyRequest, @@ -64,6 +73,13 @@ "ListClientTlsPoliciesResponse", "UpdateClientTlsPolicyRequest", "OperationMetadata", + "CreateDnsThreatDetectorRequest", + "DeleteDnsThreatDetectorRequest", + "DnsThreatDetector", + "GetDnsThreatDetectorRequest", + "ListDnsThreatDetectorsRequest", + "ListDnsThreatDetectorsResponse", + "UpdateDnsThreatDetectorRequest", "CreateServerTlsPolicyRequest", "DeleteServerTlsPolicyRequest", "GetServerTlsPolicyRequest", diff --git a/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/dns_threat_detector.py b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/dns_threat_detector.py new file mode 100644 index 000000000000..956e669f518f --- /dev/null +++ b/packages/google-cloud-network-security/google/cloud/network_security_v1beta1/types/dns_threat_detector.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.networksecurity.v1beta1", + manifest={ + "DnsThreatDetector", + "ListDnsThreatDetectorsRequest", + "ListDnsThreatDetectorsResponse", + "GetDnsThreatDetectorRequest", + "CreateDnsThreatDetectorRequest", + "UpdateDnsThreatDetectorRequest", + "DeleteDnsThreatDetectorRequest", + }, +) + + +class DnsThreatDetector(proto.Message): + r"""A DNS threat detector sends DNS query logs to a *provider* that then + analyzes the logs to identify threat events in the DNS queries. By + default, all VPC networks in your projects are included. You can + exclude specific networks by supplying ``excluded_networks``. + + Attributes: + name (str): + Immutable. Identifier. Name of the + DnsThreatDetector resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Create time stamp. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Update time stamp. + labels (MutableMapping[str, str]): + Optional. Any labels associated with the + DnsThreatDetector, listed as key value pairs. + excluded_networks (MutableSequence[str]): + Optional. A list of network resource names which aren't + monitored by this DnsThreatDetector. + + Example: + ``projects/PROJECT_ID/global/networks/NETWORK_NAME``. + provider (google.cloud.network_security_v1beta1.types.DnsThreatDetector.Provider): + Required. The provider used for DNS threat + analysis. + """ + + class Provider(proto.Enum): + r"""Name of the provider used for DNS threat analysis. + + Values: + PROVIDER_UNSPECIFIED (0): + An unspecified provider. + INFOBLOX (1): + The Infoblox DNS threat detector provider. 
+ """ + PROVIDER_UNSPECIFIED = 0 + INFOBLOX = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + excluded_networks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + provider: Provider = proto.Field( + proto.ENUM, + number=6, + enum=Provider, + ) + + +class ListDnsThreatDetectorsRequest(proto.Message): + r"""The message for requesting a list of DnsThreatDetectors in + the project. + + Attributes: + parent (str): + Required. The parent value for + ``ListDnsThreatDetectorsRequest``. + page_size (int): + Optional. The requested page size. The server + may return fewer items than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. A page token received from a previous + ``ListDnsThreatDetectorsRequest`` call. Provide this to + retrieve the subsequent page. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDnsThreatDetectorsResponse(proto.Message): + r"""The response message to requesting a list of + DnsThreatDetectors. + + Attributes: + dns_threat_detectors (MutableSequence[google.cloud.network_security_v1beta1.types.DnsThreatDetector]): + The list of DnsThreatDetector resources. + next_page_token (str): + A token, which can be sent as ``page_token``, to retrieve + the next page. + unreachable (MutableSequence[str]): + Unordered list. Unreachable ``DnsThreatDetector`` resources. 
+ """ + + @property + def raw_page(self): + return self + + dns_threat_detectors: MutableSequence["DnsThreatDetector"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DnsThreatDetector", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetDnsThreatDetectorRequest(proto.Message): + r"""The message sent to get a DnsThreatDetector. + + Attributes: + name (str): + Required. Name of the DnsThreatDetector + resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDnsThreatDetectorRequest(proto.Message): + r"""The message to create a DnsThreatDetector. + + Attributes: + parent (str): + Required. The value for the parent of the + DnsThreatDetector resource. + dns_threat_detector_id (str): + Optional. The ID of the requesting + DnsThreatDetector object. If this field is not + supplied, the service generates an identifier. + dns_threat_detector (google.cloud.network_security_v1beta1.types.DnsThreatDetector): + Required. The ``DnsThreatDetector`` resource to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + dns_threat_detector_id: str = proto.Field( + proto.STRING, + number=2, + ) + dns_threat_detector: "DnsThreatDetector" = proto.Field( + proto.MESSAGE, + number=3, + message="DnsThreatDetector", + ) + + +class UpdateDnsThreatDetectorRequest(proto.Message): + r"""The message for updating a DnsThreatDetector. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The field mask is used to specify the fields to be + overwritten in the DnsThreatDetector resource by the update. + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the mask is not provided then all + fields present in the request will be overwritten. 
+ dns_threat_detector (google.cloud.network_security_v1beta1.types.DnsThreatDetector): + Required. The DnsThreatDetector resource + being updated. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + dns_threat_detector: "DnsThreatDetector" = proto.Field( + proto.MESSAGE, + number=2, + message="DnsThreatDetector", + ) + + +class DeleteDnsThreatDetectorRequest(proto.Message): + r"""The message for deleting a DnsThreatDetector. + + Attributes: + name (str): + Required. Name of the DnsThreatDetector + resource. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py new file mode 100644 index 000000000000..6ffab5edbdab --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +async def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py new file mode 100644 index 000000000000..bdd8a649232a --- /dev/null +++ 
b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +def sample_create_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.create_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py new file mode 100644 index 000000000000..15c0e888d7b8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +async def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1beta1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + await client.delete_dns_threat_detector(request=request) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py new file mode 100644 index 000000000000..4926a947243d --- /dev/null +++ 
b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +def sample_delete_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1beta1.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + client.delete_dns_threat_detector(request=request) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py new file mode 100644 index 000000000000..0226ebdfa10d --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_GetDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +async def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1beta1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = await client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_GetDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py new file mode 100644 index 000000000000..f6b73ab3b25e --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_GetDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +def sample_get_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1beta1.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Make the request + response = client.get_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_GetDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py new file mode 100644 index 000000000000..a80a640a669f --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListDnsThreatDetectors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +async def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + request = network_security_v1beta1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py new file mode 100644 index 000000000000..703166347394 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py @@ -0,0 
+1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDnsThreatDetectors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +def sample_list_dns_threat_detectors(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + request = network_security_v1beta1.ListDnsThreatDetectorsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_dns_threat_detectors(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py new file mode 100644 index 000000000000..0a9de2270bd8 --- /dev/null +++ b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +async def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceAsyncClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = await client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_async] diff --git a/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py new file mode 100644 index 000000000000..7f5efcda2a69 --- /dev/null +++ 
b/packages/google-cloud-network-security/samples/generated_samples/networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDnsThreatDetector +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-security + + +# [START networksecurity_v1beta1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_security_v1beta1 + + +def sample_update_dns_threat_detector(): + # Create a client + client = network_security_v1beta1.DnsThreatDetectorServiceClient() + + # Initialize request argument(s) + dns_threat_detector = network_security_v1beta1.DnsThreatDetector() + dns_threat_detector.provider = "INFOBLOX" + + request = network_security_v1beta1.UpdateDnsThreatDetectorRequest( + dns_threat_detector=dns_threat_detector, + ) + + # Make the request + response = client.update_dns_threat_detector(request=request) + + # Handle the response + print(response) + + +# [END networksecurity_v1beta1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_sync] diff --git a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json index 0407c848db48..e29570c6bcb9 100644 --- a/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json +++ b/packages/google-cloud-network-security/samples/generated_samples/snippet_metadata_google.cloud.networksecurity.v1beta1.json @@ -11,6 +11,829 @@ "version": "0.9.21" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient.create_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.CreateDnsThreatDetector", + "service": { + "fullName": 
"google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "CreateDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.CreateDnsThreatDetectorRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1beta1.types.DnsThreatDetector" + }, + { + "name": "dns_threat_detector_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.types.DnsThreatDetector", + "shortName": "create_dns_threat_detector" + }, + "description": "Sample for CreateDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient", + "shortName": "DnsThreatDetectorServiceClient" + }, + "fullName": 
"google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient.create_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.CreateDnsThreatDetector", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "CreateDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.CreateDnsThreatDetectorRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1beta1.types.DnsThreatDetector" + }, + { + "name": "dns_threat_detector_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.types.DnsThreatDetector", + "shortName": "create_dns_threat_detector" + }, + "description": "Sample for CreateDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_CreateDnsThreatDetector_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_create_dns_threat_detector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": 
true, + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient.delete_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.DeleteDnsThreatDetector", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "DeleteDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.DeleteDnsThreatDetectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_dns_threat_detector" + }, + "description": "Sample for DeleteDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient", + 
"shortName": "DnsThreatDetectorServiceClient" + }, + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient.delete_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.DeleteDnsThreatDetector", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "DeleteDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.DeleteDnsThreatDetectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_dns_threat_detector" + }, + "description": "Sample for DeleteDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_DeleteDnsThreatDetector_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_delete_dns_threat_detector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" + }, + "fullName": 
"google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient.get_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.GetDnsThreatDetector", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "GetDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.GetDnsThreatDetectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.types.DnsThreatDetector", + "shortName": "get_dns_threat_detector" + }, + "description": "Sample for GetDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_GetDnsThreatDetector_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient", + "shortName": "DnsThreatDetectorServiceClient" + }, + "fullName": 
"google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient.get_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.GetDnsThreatDetector", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "GetDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.GetDnsThreatDetectorRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.types.DnsThreatDetector", + "shortName": "get_dns_threat_detector" + }, + "description": "Sample for GetDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_GetDnsThreatDetector_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_get_dns_threat_detector_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" + }, + "fullName": 
"google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient.list_dns_threat_detectors", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.ListDnsThreatDetectors", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "ListDnsThreatDetectors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsAsyncPager", + "shortName": "list_dns_threat_detectors" + }, + "description": "Sample for ListDnsThreatDetectors", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient", + "shortName": 
"DnsThreatDetectorServiceClient" + }, + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient.list_dns_threat_detectors", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.ListDnsThreatDetectors", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "ListDnsThreatDetectors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.ListDnsThreatDetectorsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.services.dns_threat_detector_service.pagers.ListDnsThreatDetectorsPager", + "shortName": "list_dns_threat_detectors" + }, + "description": "Sample for ListDnsThreatDetectors", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_ListDnsThreatDetectors_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_list_dns_threat_detectors_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient", + "shortName": "DnsThreatDetectorServiceAsyncClient" + }, + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceAsyncClient.update_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.UpdateDnsThreatDetector", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "UpdateDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.UpdateDnsThreatDetectorRequest" + }, + { + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1beta1.types.DnsThreatDetector" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.types.DnsThreatDetector", + "shortName": "update_dns_threat_detector" + }, + "description": "Sample for UpdateDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient", + "shortName": "DnsThreatDetectorServiceClient" + }, + "fullName": "google.cloud.network_security_v1beta1.DnsThreatDetectorServiceClient.update_dns_threat_detector", + "method": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService.UpdateDnsThreatDetector", + "service": { + "fullName": "google.cloud.networksecurity.v1beta1.DnsThreatDetectorService", + "shortName": "DnsThreatDetectorService" + }, + "shortName": "UpdateDnsThreatDetector" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_security_v1beta1.types.UpdateDnsThreatDetectorRequest" + }, + { + "name": "dns_threat_detector", + "type": "google.cloud.network_security_v1beta1.types.DnsThreatDetector" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_security_v1beta1.types.DnsThreatDetector", + "shortName": "update_dns_threat_detector" + }, + "description": "Sample for UpdateDnsThreatDetector", + "file": "networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networksecurity_v1beta1_generated_DnsThreatDetectorService_UpdateDnsThreatDetector_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 
51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networksecurity_v1beta1_generated_dns_threat_detector_service_update_dns_threat_detector_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-network-security/scripts/fixup_network_security_v1beta1_keywords.py b/packages/google-cloud-network-security/scripts/fixup_network_security_v1beta1_keywords.py index f383005222af..d0bd99eb86b1 100644 --- a/packages/google-cloud-network-security/scripts/fixup_network_security_v1beta1_keywords.py +++ b/packages/google-cloud-network-security/scripts/fixup_network_security_v1beta1_keywords.py @@ -41,18 +41,23 @@ class network_securityCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_authorization_policy': ('parent', 'authorization_policy_id', 'authorization_policy', ), 'create_client_tls_policy': ('parent', 'client_tls_policy_id', 'client_tls_policy', ), + 'create_dns_threat_detector': ('parent', 'dns_threat_detector', 'dns_threat_detector_id', ), 'create_server_tls_policy': ('parent', 'server_tls_policy_id', 'server_tls_policy', ), 'delete_authorization_policy': ('name', ), 'delete_client_tls_policy': ('name', ), + 'delete_dns_threat_detector': ('name', ), 'delete_server_tls_policy': ('name', ), 'get_authorization_policy': ('name', ), 'get_client_tls_policy': ('name', ), + 'get_dns_threat_detector': ('name', ), 'get_server_tls_policy': ('name', ), 'list_authorization_policies': ('parent', 'page_size', 'page_token', ), 'list_client_tls_policies': ('parent', 'page_size', 'page_token', ), + 'list_dns_threat_detectors': ('parent', 'page_size', 'page_token', ), 'list_server_tls_policies': ('parent', 'page_size', 'page_token', ), 'update_authorization_policy': ('authorization_policy', 'update_mask', ), 'update_client_tls_policy': ('client_tls_policy', 'update_mask', ), + 'update_dns_threat_detector': ('dns_threat_detector', 
'update_mask', ), 'update_server_tls_policy': ('server_tls_policy', 'update_mask', ), } diff --git a/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_dns_threat_detector_service.py b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_dns_threat_detector_service.py new file mode 100644 index 000000000000..a41dc5d72274 --- /dev/null +++ b/packages/google-cloud-network-security/tests/unit/gapic/network_security_v1beta1/test_dns_threat_detector_service.py @@ -0,0 +1,8213 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_security_v1beta1.services.dns_threat_detector_service import ( + DnsThreatDetectorServiceAsyncClient, + DnsThreatDetectorServiceClient, + pagers, + transports, +) +from google.cloud.network_security_v1beta1.types import ( + dns_threat_detector as gcn_dns_threat_detector, +) +from google.cloud.network_security_v1beta1.types import 
dns_threat_detector + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DnsThreatDetectorServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + DnsThreatDetectorServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` 
or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DnsThreatDetectorServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DnsThreatDetectorServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DnsThreatDetectorServiceClient._get_client_cert_source(None, False) is None + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + DnsThreatDetectorServiceClient._get_client_cert_source( 
+ mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + default_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DnsThreatDetectorServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + DnsThreatDetectorServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as 
excinfo: + DnsThreatDetectorServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DnsThreatDetectorServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DnsThreatDetectorServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DnsThreatDetectorServiceClient._get_universe_domain(None, None) + == DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DnsThreatDetectorServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DnsThreatDetectorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = 
DnsThreatDetectorServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DnsThreatDetectorServiceClient, "grpc"), + (DnsThreatDetectorServiceAsyncClient, "grpc_asyncio"), + (DnsThreatDetectorServiceClient, "rest"), + ], +) +def test_dns_threat_detector_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DnsThreatDetectorServiceGrpcTransport, "grpc"), + (transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DnsThreatDetectorServiceRestTransport, "rest"), + ], +) +def test_dns_threat_detector_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, 
None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DnsThreatDetectorServiceClient, "grpc"), + (DnsThreatDetectorServiceAsyncClient, "grpc_asyncio"), + (DnsThreatDetectorServiceClient, "rest"), + ], +) +def test_dns_threat_detector_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +def test_dns_threat_detector_service_client_get_transport_class(): + transport = DnsThreatDetectorServiceClient.get_transport_class() + available_transports = [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceRestTransport, + ] + assert transport in available_transports + + transport = DnsThreatDetectorServiceClient.get_transport_class("grpc") + assert transport == transports.DnsThreatDetectorServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + 
DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +def test_dns_threat_detector_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + DnsThreatDetectorServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + DnsThreatDetectorServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + "true", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + 
"grpc_asyncio", + "true", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + "false", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + "true", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dns_threat_detector_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [DnsThreatDetectorServiceClient, DnsThreatDetectorServiceAsyncClient], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DnsThreatDetectorServiceAsyncClient), +) +def test_dns_threat_detector_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [DnsThreatDetectorServiceClient, DnsThreatDetectorServiceAsyncClient], +) +@mock.patch.object( + DnsThreatDetectorServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceClient), +) +@mock.patch.object( + DnsThreatDetectorServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DnsThreatDetectorServiceAsyncClient), +) +def test_dns_threat_detector_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DnsThreatDetectorServiceClient._DEFAULT_UNIVERSE + default_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DnsThreatDetectorServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + ), + ], +) +def test_dns_threat_detector_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceRestTransport, + "rest", + None, + ), + ], +) +def test_dns_threat_detector_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_dns_threat_detector_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_security_v1beta1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DnsThreatDetectorServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_dns_threat_detector_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.ListDnsThreatDetectorsRequest, + dict, + ], +) +def test_list_dns_threat_detectors(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the 
runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDnsThreatDetectorsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_dns_threat_detectors_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dns_threat_detector.ListDnsThreatDetectorsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_dns_threat_detectors(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dns_threat_detector.ListDnsThreatDetectorsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_dns_threat_detectors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_dns_threat_detectors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_dns_threat_detectors + ] = mock_rpc + request = {} + client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_dns_threat_detectors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_dns_threat_detectors + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_dns_threat_detectors + ] = mock_rpc + + request = {} + await client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_dns_threat_detectors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async( + transport: str = "grpc_asyncio", + request_type=dns_threat_detector.ListDnsThreatDetectorsRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDnsThreatDetectorsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_from_dict(): + await test_list_dns_threat_detectors_async(request_type=dict) + + +def test_list_dns_threat_detectors_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse() + ) + await client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_dns_threat_detectors_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_dns_threat_detectors( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_dns_threat_detectors_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dns_threat_detectors( + dns_threat_detector.ListDnsThreatDetectorsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_dns_threat_detectors( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_dns_threat_detectors( + dns_threat_detector.ListDnsThreatDetectorsRequest(), + parent="parent_value", + ) + + +def test_list_dns_threat_detectors_pager(transport_name: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_dns_threat_detectors( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, dns_threat_detector.DnsThreatDetector) for i in results + ) + + +def test_list_dns_threat_detectors_pages(transport_name: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + pages = list(client.list_dns_threat_detectors(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_pager(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_dns_threat_detectors( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, dns_threat_detector.DnsThreatDetector) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_dns_threat_detectors_async_pages(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_dns_threat_detectors(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.GetDnsThreatDetectorRequest, + dict, + ], +) +def test_get_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + response = client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.GetDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert response.provider == dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + + +def test_get_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dns_threat_detector.GetDnsThreatDetectorRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dns_threat_detector.GetDnsThreatDetectorRequest( + name="name_value", + ) + + +def test_get_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_dns_threat_detector + ] = mock_rpc + request = {} + client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_dns_threat_detector + ] = mock_rpc + + request = {} + await client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=dns_threat_detector.GetDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + response = await client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.GetDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert response.provider == dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_async_from_dict(): + await test_get_dns_threat_detector_async(request_type=dict) + + +def test_get_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.GetDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value = dns_threat_detector.DnsThreatDetector() + client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.GetDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector() + ) + await client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.DnsThreatDetector() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dns_threat_detector( + dns_threat_detector.GetDnsThreatDetectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dns_threat_detector.DnsThreatDetector() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_dns_threat_detector( + dns_threat_detector.GetDnsThreatDetectorRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + dict, + ], +) +def test_create_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + response = client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +def test_create_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_dns_threat_detector.CreateDnsThreatDetectorRequest( + parent="parent_value", + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +def test_create_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_dns_threat_detector + ] = mock_rpc + request = {} + client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_dns_threat_detector + ] = mock_rpc + + request = {} + await client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.create_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + response = await client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_async_from_dict(): + await test_create_dns_threat_detector_async(request_type=dict) + + +def test_create_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + await client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dns_threat_detector( + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].dns_threat_detector_id + mock_val = "dns_threat_detector_id_value" + assert arg == mock_val + + +def test_create_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dns_threat_detector( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(), + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_dns_threat_detector( + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].dns_threat_detector_id + mock_val = "dns_threat_detector_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_dns_threat_detector( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(), + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + dict, + ], +) +def test_update_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + response = client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +def test_update_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + +def test_update_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_dns_threat_detector + ] = mock_rpc + request = {} + client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_dns_threat_detector + ] = mock_rpc + + request = {} + await client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + response = await client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_async_from_dict(): + await test_update_dns_threat_detector_async(request_type=dict) + + +def test_update_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + request.dns_threat_detector.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dns_threat_detector.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + request.dns_threat_detector.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + await client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "dns_threat_detector.name=name_value", + ) in kw["metadata"] + + +def test_update_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_dns_threat_detector( + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_dns_threat_detector( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(), + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_dns_threat_detector( + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].dns_threat_detector + mock_val = gcn_dns_threat_detector.DnsThreatDetector(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_dns_threat_detector( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(), + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.DeleteDnsThreatDetectorRequest, + dict, + ], +) +def test_delete_dns_threat_detector(request_type, transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dns_threat_detector_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dns_threat_detector.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_dns_threat_detector(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dns_threat_detector.DeleteDnsThreatDetectorRequest( + name="name_value", + ) + + +def test_delete_dns_threat_detector_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_dns_threat_detector + ] = mock_rpc + request = {} + client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_dns_threat_detector + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_dns_threat_detector + ] = mock_rpc + + request = {} + await client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_async( + transport: str = "grpc_asyncio", + request_type=dns_threat_detector.DeleteDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_async_from_dict(): + await test_delete_dns_threat_detector_async(request_type=dict) + + +def test_delete_dns_threat_detector_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value = None + client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_dns_threat_detector_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_dns_threat_detector_flattened_error(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dns_threat_detector( + dns_threat_detector.DeleteDnsThreatDetectorRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_flattened_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dns_threat_detector( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_flattened_error_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_dns_threat_detector( + dns_threat_detector.DeleteDnsThreatDetectorRequest(), + name="name_value", + ) + + +def test_list_dns_threat_detectors_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_dns_threat_detectors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_dns_threat_detectors + ] = mock_rpc + + request = {} + client.list_dns_threat_detectors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_dns_threat_detectors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_dns_threat_detectors_rest_required_fields( + request_type=dns_threat_detector.ListDnsThreatDetectorsRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dns_threat_detectors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_dns_threat_detectors._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_dns_threat_detectors(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_dns_threat_detectors_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_dns_threat_detectors._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_dns_threat_detectors_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_dns_threat_detectors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/dnsThreatDetectors" + % client.transport._host, + args[1], + ) + + +def test_list_dns_threat_detectors_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_dns_threat_detectors( + dns_threat_detector.ListDnsThreatDetectorsRequest(), + parent="parent_value", + ) + + +def test_list_dns_threat_detectors_rest_pager(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="abc", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[], + next_page_token="def", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + ], + next_page_token="ghi", + ), + dns_threat_detector.ListDnsThreatDetectorsResponse( + dns_threat_detectors=[ + dns_threat_detector.DnsThreatDetector(), + dns_threat_detector.DnsThreatDetector(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + dns_threat_detector.ListDnsThreatDetectorsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_dns_threat_detectors(request=sample_request) + + results = list(pager) + assert 
len(results) == 6 + assert all( + isinstance(i, dns_threat_detector.DnsThreatDetector) for i in results + ) + + pages = list(client.list_dns_threat_detectors(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_dns_threat_detector + ] = mock_rpc + + request = {} + client.get_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_dns_threat_detector_rest_required_fields( + request_type=dns_threat_detector.GetDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.DnsThreatDetector() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dns_threat_detector.DnsThreatDetector() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/dnsThreatDetectors/*}" + % client.transport._host, + args[1], + ) + + +def test_get_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_dns_threat_detector( + dns_threat_detector.GetDnsThreatDetectorRequest(), + name="name_value", + ) + + +def test_create_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_dns_threat_detector + ] = mock_rpc + + request = {} + client.create_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_dns_threat_detector_rest_required_fields( + request_type=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_dns_threat_detector._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("dns_threat_detector_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dnsThreatDetectorId",)) + & set( + ( + "parent", + "dnsThreatDetector", + ) + ) + ) + + +def test_create_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{parent=projects/*/locations/*}/dnsThreatDetectors" + % client.transport._host, + args[1], + ) + + +def test_create_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_dns_threat_detector( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest(), + parent="parent_value", + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + dns_threat_detector_id="dns_threat_detector_id_value", + ) + + +def test_update_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_dns_threat_detector + ] = mock_rpc + + request = {} + client.update_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_dns_threat_detector_rest_required_fields( + request_type=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_dns_threat_detector._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("dnsThreatDetector",))) + + +def test_update_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcn_dns_threat_detector.DnsThreatDetector() + + # get arguments that satisfy an http rule for this method + sample_request = { + "dns_threat_detector": { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{dns_threat_detector.name=projects/*/locations/*/dnsThreatDetectors/*}" + % client.transport._host, + args[1], + ) + + +def test_update_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_dns_threat_detector( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest(), + dns_threat_detector=gcn_dns_threat_detector.DnsThreatDetector( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_dns_threat_detector_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_dns_threat_detector + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_dns_threat_detector + ] = mock_rpc + + request = {} + client.delete_dns_threat_detector(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_dns_threat_detector(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_dns_threat_detector_rest_required_fields( + request_type=dns_threat_detector.DeleteDnsThreatDetectorRequest, +): + transport_class = transports.DnsThreatDetectorServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_dns_threat_detector._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_dns_threat_detector(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_dns_threat_detector_rest_unset_required_fields(): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_dns_threat_detector._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_dns_threat_detector_rest_flattened(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_dns_threat_detector(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{name=projects/*/locations/*/dnsThreatDetectors/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_dns_threat_detector_rest_flattened_error(transport: str = "rest"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dns_threat_detector( + dns_threat_detector.DeleteDnsThreatDetectorRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DnsThreatDetectorServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DnsThreatDetectorServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DnsThreatDetectorServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + transports.DnsThreatDetectorServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DnsThreatDetectorServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_dns_threat_detectors_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + call.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + client.list_dns_threat_detectors(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.ListDnsThreatDetectorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + call.return_value = dns_threat_detector.DnsThreatDetector() + client.get_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.GetDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.create_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + call.return_value = gcn_dns_threat_detector.DnsThreatDetector() + client.update_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_dns_threat_detector_empty_call_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + call.return_value = None + client.delete_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DnsThreatDetectorServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_dns_threat_detectors_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_dns_threat_detectors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.ListDnsThreatDetectorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + await client.get_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.GetDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + await client.create_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + ) + await client.update_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_dns_threat_detector_empty_call_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DnsThreatDetectorServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_dns_threat_detectors_rest_bad_request( + request_type=dns_threat_detector.ListDnsThreatDetectorsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_dns_threat_detectors(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.ListDnsThreatDetectorsRequest, + dict, + ], +) +def test_list_dns_threat_detectors_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_dns_threat_detectors(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDnsThreatDetectorsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_dns_threat_detectors_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_list_dns_threat_detectors", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_list_dns_threat_detectors_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + 
"pre_list_dns_threat_detectors", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dns_threat_detector.ListDnsThreatDetectorsRequest.pb( + dns_threat_detector.ListDnsThreatDetectorsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dns_threat_detector.ListDnsThreatDetectorsResponse.to_json( + dns_threat_detector.ListDnsThreatDetectorsResponse() + ) + req.return_value.content = return_value + + request = dns_threat_detector.ListDnsThreatDetectorsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dns_threat_detector.ListDnsThreatDetectorsResponse() + post_with_metadata.return_value = ( + dns_threat_detector.ListDnsThreatDetectorsResponse(), + metadata, + ) + + client.list_dns_threat_detectors( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_dns_threat_detector_rest_bad_request( + request_type=dns_threat_detector.GetDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.GetDnsThreatDetectorRequest, + dict, + ], +) +def test_get_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_dns_threat_detector(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert response.provider == dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_get_dns_threat_detector", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_get_dns_threat_detector_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_get_dns_threat_detector", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dns_threat_detector.GetDnsThreatDetectorRequest.pb( + dns_threat_detector.GetDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dns_threat_detector.DnsThreatDetector.to_json( + dns_threat_detector.DnsThreatDetector() + ) + req.return_value.content = return_value + + request = dns_threat_detector.GetDnsThreatDetectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = dns_threat_detector.DnsThreatDetector() + post_with_metadata.return_value = ( + dns_threat_detector.DnsThreatDetector(), + metadata, + ) + + client.get_dns_threat_detector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_dns_threat_detector_rest_bad_request( + request_type=gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest, + dict, + ], +) +def test_create_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["dns_threat_detector"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "excluded_networks": 
["excluded_networks_value1", "excluded_networks_value2"], + "provider": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.meta.fields[ + "dns_threat_detector" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dns_threat_detector"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in 
runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dns_threat_detector"][field])): + del request_init["dns_threat_detector"][field][i][subfield] + else: + del request_init["dns_threat_detector"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_dns_threat_detector(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_create_dns_threat_detector", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_create_dns_threat_detector_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_create_dns_threat_detector", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest.pb( + gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcn_dns_threat_detector.DnsThreatDetector.to_json( + gcn_dns_threat_detector.DnsThreatDetector() + ) + req.return_value.content = return_value + + request = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + metadata = [ + 
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcn_dns_threat_detector.DnsThreatDetector() + post_with_metadata.return_value = ( + gcn_dns_threat_detector.DnsThreatDetector(), + metadata, + ) + + client.create_dns_threat_detector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_dns_threat_detector_rest_bad_request( + request_type=gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "dns_threat_detector": { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest, + dict, + ], +) +def test_update_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "dns_threat_detector": { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + } + request_init["dns_threat_detector"] = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "excluded_networks": ["excluded_networks_value1", "excluded_networks_value2"], + "provider": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.meta.fields[ + "dns_threat_detector" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["dns_threat_detector"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["dns_threat_detector"][field])): + del 
request_init["dns_threat_detector"][field][i][subfield] + else: + del request_init["dns_threat_detector"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcn_dns_threat_detector.DnsThreatDetector( + name="name_value", + excluded_networks=["excluded_networks_value"], + provider=gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcn_dns_threat_detector.DnsThreatDetector.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_dns_threat_detector(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcn_dns_threat_detector.DnsThreatDetector) + assert response.name == "name_value" + assert response.excluded_networks == ["excluded_networks_value"] + assert ( + response.provider == gcn_dns_threat_detector.DnsThreatDetector.Provider.INFOBLOX + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_update_dns_threat_detector", + ) as post, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "post_update_dns_threat_detector_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_update_dns_threat_detector", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest.pb( + gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcn_dns_threat_detector.DnsThreatDetector.to_json( + gcn_dns_threat_detector.DnsThreatDetector() + ) + req.return_value.content = return_value + + request = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + metadata = [ + 
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcn_dns_threat_detector.DnsThreatDetector() + post_with_metadata.return_value = ( + gcn_dns_threat_detector.DnsThreatDetector(), + metadata, + ) + + client.update_dns_threat_detector( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_dns_threat_detector_rest_bad_request( + request_type=dns_threat_detector.DeleteDnsThreatDetectorRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_dns_threat_detector(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dns_threat_detector.DeleteDnsThreatDetectorRequest, + dict, + ], +) +def test_delete_dns_threat_detector_rest_call_success(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/dnsThreatDetectors/sample3" + } + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_dns_threat_detector(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_dns_threat_detector_rest_interceptors(null_interceptor): + transport = transports.DnsThreatDetectorServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DnsThreatDetectorServiceRestInterceptor(), + ) + client = DnsThreatDetectorServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DnsThreatDetectorServiceRestInterceptor, + "pre_delete_dns_threat_detector", + ) as pre: + pre.assert_not_called() + pb_message = dns_threat_detector.DeleteDnsThreatDetectorRequest.pb( + dns_threat_detector.DeleteDnsThreatDetectorRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = dns_threat_detector.DeleteDnsThreatDetectorRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_dns_threat_detector( + 
request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/sample2/authorizationPolicies/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_dns_threat_detectors_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_dns_threat_detectors), "__call__" + ) as call: + client.list_dns_threat_detectors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.ListDnsThreatDetectorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_dns_threat_detector), "__call__" + ) as call: + client.get_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.GetDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_dns_threat_detector), "__call__" + ) as call: + client.create_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.CreateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_dns_threat_detector), "__call__" + ) as call: + client.update_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcn_dns_threat_detector.UpdateDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_dns_threat_detector_empty_call_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_dns_threat_detector), "__call__" + ) as call: + client.delete_dns_threat_detector(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dns_threat_detector.DeleteDnsThreatDetectorRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DnsThreatDetectorServiceGrpcTransport, + ) + + +def test_dns_threat_detector_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DnsThreatDetectorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_dns_threat_detector_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_security_v1beta1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DnsThreatDetectorServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "list_dns_threat_detectors", + "get_dns_threat_detector", + "create_dns_threat_detector", + "update_dns_threat_detector", + "delete_dns_threat_detector", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_dns_threat_detector_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_security_v1beta1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DnsThreatDetectorServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_dns_threat_detector_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_security_v1beta1.services.dns_threat_detector_service.transports.DnsThreatDetectorServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DnsThreatDetectorServiceTransport() + adc.assert_called_once() + + +def test_dns_threat_detector_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DnsThreatDetectorServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ], +) +def test_dns_threat_detector_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + transports.DnsThreatDetectorServiceRestTransport, + ], +) +def test_dns_threat_detector_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DnsThreatDetectorServiceGrpcTransport, grpc_helpers), + (transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_dns_threat_detector_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networksecurity.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networksecurity.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ], +) +def test_dns_threat_detector_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_dns_threat_detector_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DnsThreatDetectorServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_dns_threat_detector_service_host_no_port(transport_name): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networksecurity.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_dns_threat_detector_service_host_with_port(transport_name): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networksecurity.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networksecurity.googleapis.com:8000" + if transport_name in ["grpc", 
"grpc_asyncio"] + else "https://networksecurity.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_dns_threat_detector_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DnsThreatDetectorServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DnsThreatDetectorServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_dns_threat_detectors._session + session2 = client2.transport.list_dns_threat_detectors._session + assert session1 != session2 + session1 = client1.transport.get_dns_threat_detector._session + session2 = client2.transport.get_dns_threat_detector._session + assert session1 != session2 + session1 = client1.transport.create_dns_threat_detector._session + session2 = client2.transport.create_dns_threat_detector._session + assert session1 != session2 + session1 = client1.transport.update_dns_threat_detector._session + session2 = client2.transport.update_dns_threat_detector._session + assert session1 != session2 + session1 = client1.transport.delete_dns_threat_detector._session + session2 = client2.transport.delete_dns_threat_detector._session + assert session1 != session2 + + +def test_dns_threat_detector_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.DnsThreatDetectorServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_dns_threat_detector_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DnsThreatDetectorServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ], +) +def test_dns_threat_detector_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key 
bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.DnsThreatDetectorServiceGrpcTransport, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ], +) +def test_dns_threat_detector_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_dns_threat_detector_path(): + project = "squid" + location = "clam" + dns_threat_detector = "whelk" + expected = 
"projects/{project}/locations/{location}/dnsThreatDetectors/{dns_threat_detector}".format( + project=project, + location=location, + dns_threat_detector=dns_threat_detector, + ) + actual = DnsThreatDetectorServiceClient.dns_threat_detector_path( + project, location, dns_threat_detector + ) + assert expected == actual + + +def test_parse_dns_threat_detector_path(): + expected = { + "project": "octopus", + "location": "oyster", + "dns_threat_detector": "nudibranch", + } + path = DnsThreatDetectorServiceClient.dns_threat_detector_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_dns_threat_detector_path(path) + assert expected == actual + + +def test_network_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = DnsThreatDetectorServiceClient.network_path(project, network) + assert expected == actual + + +def test_parse_network_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = DnsThreatDetectorServiceClient.network_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_network_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DnsThreatDetectorServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = DnsThreatDetectorServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DnsThreatDetectorServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DnsThreatDetectorServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = DnsThreatDetectorServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DnsThreatDetectorServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = DnsThreatDetectorServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = DnsThreatDetectorServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = DnsThreatDetectorServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DnsThreatDetectorServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DnsThreatDetectorServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = DnsThreatDetectorServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DnsThreatDetectorServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DnsThreatDetectorServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DnsThreatDetectorServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DnsThreatDetectorServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DnsThreatDetectorServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = DnsThreatDetectorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DnsThreatDetectorServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = DnsThreatDetectorServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DnsThreatDetectorServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for 
transport in transports: + client = DnsThreatDetectorServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + DnsThreatDetectorServiceClient, + transports.DnsThreatDetectorServiceGrpcTransport, + ), + ( + DnsThreatDetectorServiceAsyncClient, + transports.DnsThreatDetectorServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py index 8a0a69131c4a..8fe3d3f81a3e 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/__init__.py @@ -43,6 +43,7 @@ TransferCounters, TransferErrorLogEntry, TransferErrorSummary, + TransferMetadataOptions, TransferOperationMetadata, TransferType, 
UpdateInstanceRequest, @@ -74,6 +75,7 @@ "TransferCounters", "TransferErrorLogEntry", "TransferErrorSummary", + "TransferMetadataOptions", "TransferOperationMetadata", "TransferType", "UpdateInstanceRequest", diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py index 850650ae373c..9f2d6fe23d06 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/__init__.py @@ -37,6 +37,7 @@ TransferCounters, TransferErrorLogEntry, TransferErrorSummary, + TransferMetadataOptions, TransferOperationMetadata, TransferType, UpdateInstanceRequest, @@ -63,6 +64,7 @@ "TransferCounters", "TransferErrorLogEntry", "TransferErrorSummary", + "TransferMetadataOptions", "TransferOperationMetadata", "UpdateInstanceRequest", "DeploymentType", diff --git a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py index 113be9f56edf..fa624f139be3 100644 --- a/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py +++ b/packages/google-cloud-parallelstore/google/cloud/parallelstore_v1/types/parallelstore.py @@ -30,6 +30,7 @@ "DirectoryStripeLevel", "DeploymentType", "Instance", + "TransferMetadataOptions", "ListInstancesRequest", "ListInstancesResponse", "GetInstanceRequest", @@ -160,8 +161,8 @@ class Instance(proto.Message): between 12000 and 100000, in multiples of 4000; e.g., 12000, 16000, 20000, ... daos_version (str): - Output only. Deprecated 'daos_version' field. Output only. - The version of DAOS software running in the instance. + Output only. Deprecated: The version of DAOS + software running in the instance. access_points (MutableSequence[str]): Output only. 
A list of IPv4 addresses used for client side configuration. @@ -306,6 +307,82 @@ class State(proto.Enum): ) +class TransferMetadataOptions(proto.Message): + r"""Transfer metadata options for the instance. + + Attributes: + uid (google.cloud.parallelstore_v1.types.TransferMetadataOptions.Uid): + Optional. The UID preservation behavior. + gid (google.cloud.parallelstore_v1.types.TransferMetadataOptions.Gid): + Optional. The GID preservation behavior. + mode (google.cloud.parallelstore_v1.types.TransferMetadataOptions.Mode): + Optional. The mode preservation behavior. + """ + + class Uid(proto.Enum): + r"""The UID preservation behavior. + + Values: + UID_UNSPECIFIED (0): + default is UID_NUMBER_PRESERVE. + UID_SKIP (1): + Do not preserve UID during a transfer job. + UID_NUMBER_PRESERVE (2): + Preserve UID that is in number format during + a transfer job. + """ + UID_UNSPECIFIED = 0 + UID_SKIP = 1 + UID_NUMBER_PRESERVE = 2 + + class Gid(proto.Enum): + r"""The GID preservation behavior. + + Values: + GID_UNSPECIFIED (0): + default is GID_NUMBER_PRESERVE. + GID_SKIP (1): + Do not preserve GID during a transfer job. + GID_NUMBER_PRESERVE (2): + Preserve GID that is in number format during + a transfer job. + """ + GID_UNSPECIFIED = 0 + GID_SKIP = 1 + GID_NUMBER_PRESERVE = 2 + + class Mode(proto.Enum): + r"""The mode preservation behavior. + + Values: + MODE_UNSPECIFIED (0): + default is MODE_PRESERVE. + MODE_SKIP (1): + Do not preserve mode during a transfer job. + MODE_PRESERVE (2): + Preserve mode during a transfer job. + """ + MODE_UNSPECIFIED = 0 + MODE_SKIP = 1 + MODE_PRESERVE = 2 + + uid: Uid = proto.Field( + proto.ENUM, + number=1, + enum=Uid, + ) + gid: Gid = proto.Field( + proto.ENUM, + number=2, + enum=Gid, + ) + mode: Mode = proto.Field( + proto.ENUM, + number=3, + enum=Mode, + ) + + class ListInstancesRequest(proto.Message): r"""List instances request. 
@@ -428,11 +505,11 @@ class CreateInstanceRequest(proto.Message): for at least 60 minutes since the first request. For example, consider a situation where you make - an initial request and t he request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -481,11 +558,11 @@ class UpdateInstanceRequest(proto.Message): for at least 60 minutes since the first request. For example, consider a situation where you make - an initial request and t he request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -525,11 +602,11 @@ class DeleteInstanceRequest(proto.Message): for at least 60 minutes after the first request. For example, consider a situation where you make - an initial request and t he request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -699,11 +776,11 @@ class ImportDataRequest(proto.Message): for at least 60 minutes since the first request. For example, consider a situation where you make - an initial request and t he request times out. - If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -722,6 +799,9 @@ class ImportDataRequest(proto.Message): If unspecified, the Parallelstore service agent is used: ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` + metadata_options (google.cloud.parallelstore_v1.types.TransferMetadataOptions): + Optional. The transfer metadata options for + the import data. """ source_gcs_bucket: "SourceGcsBucket" = proto.Field( @@ -748,6 +828,11 @@ class ImportDataRequest(proto.Message): proto.STRING, number=5, ) + metadata_options: "TransferMetadataOptions" = proto.Field( + proto.MESSAGE, + number=6, + message="TransferMetadataOptions", + ) class ExportDataRequest(proto.Message): @@ -775,11 +860,11 @@ class ExportDataRequest(proto.Message): for at least 60 minutes since the first request. For example, consider a situation where you make - an initial request and t he request times out. 
- If you make the request again with the same - request ID, the server can check if original - operation with the same request ID was received, - and if so, will ignore the second request. This + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. @@ -797,6 +882,9 @@ class ExportDataRequest(proto.Message): If unspecified, the Parallelstore service agent is used: ``service-@gcp-sa-parallelstore.iam.gserviceaccount.com`` + metadata_options (google.cloud.parallelstore_v1.types.TransferMetadataOptions): + Optional. The metadata options for the export + data. """ source_parallelstore: "SourceParallelstore" = proto.Field( @@ -823,6 +911,11 @@ class ExportDataRequest(proto.Message): proto.STRING, number=5, ) + metadata_options: "TransferMetadataOptions" = proto.Field( + proto.MESSAGE, + number=6, + message="TransferMetadataOptions", + ) class ImportDataResponse(proto.Message): @@ -1136,10 +1229,10 @@ class TransferCounters(proto.Message): Bytes that are copied to the data destination. objects_failed (int): - Objects that are failed to write to the data + Objects that failed to be written to the data destination. bytes_failed (int): - Bytes that are failed to write to the data + Bytes that failed to be written to the data destination. 
""" diff --git a/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py b/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py index b3f768eba686..8d9aad65ea60 100644 --- a/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py +++ b/packages/google-cloud-parallelstore/scripts/fixup_parallelstore_v1_keywords.py @@ -41,9 +41,9 @@ class parallelstoreCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', ), 'delete_instance': ('name', 'request_id', ), - 'export_data': ('name', 'source_parallelstore', 'destination_gcs_bucket', 'request_id', 'service_account', ), + 'export_data': ('name', 'source_parallelstore', 'destination_gcs_bucket', 'request_id', 'service_account', 'metadata_options', ), 'get_instance': ('name', ), - 'import_data': ('name', 'source_gcs_bucket', 'destination_parallelstore', 'request_id', 'service_account', ), + 'import_data': ('name', 'source_gcs_bucket', 'destination_parallelstore', 'request_id', 'service_account', 'metadata_options', ), 'list_instances': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'update_instance': ('update_mask', 'instance', 'request_id', ), } diff --git a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1/types/regions.py b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1/types/regions.py index 131378ad876e..6c409c3e8c7f 100644 --- a/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1/types/regions.py +++ b/packages/google-shopping-merchant-accounts/google/shopping/merchant_accounts_v1/types/regions.py @@ -19,6 +19,7 @@ from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore import proto # type: ignore __protobuf__ = proto.module( @@ -323,6 +324,9 
@@ class Region(proto.Message): Output only. Indicates if the region is eligible for use in the Shipping Services configuration. + radius_area (google.shopping.merchant_accounts_v1.types.Region.RadiusArea): + Optional. A radius area that defines the + region area. """ class PostalCodeArea(proto.Message): @@ -396,6 +400,58 @@ class GeoTargetArea(proto.Message): number=1, ) + class RadiusArea(proto.Message): + r"""A radius area that defines the region area. + + Attributes: + region_code (str): + Required. `CLDR territory + code `__ + of the country the radius area applies to. + lat_lng (google.type.latlng_pb2.LatLng): + Required. The center of the radius area. It + represents a latitude/longitude pair in decimal + degrees format. + radius (float): + Required. The radius distance of the area. + radius_units (google.shopping.merchant_accounts_v1.types.Region.RadiusArea.RadiusUnits): + Optional. The unit of the radius. + """ + + class RadiusUnits(proto.Enum): + r"""The unit of measurement of the radius. Default is KILOMETERS. + + Values: + RADIUS_UNITS_UNSPECIFIED (0): + Unused default value + MILES (1): + The distance is measured in miles. + KILOMETERS (2): + The distance is measured in kilometers. 
+ """ + RADIUS_UNITS_UNSPECIFIED = 0 + MILES = 1 + KILOMETERS = 2 + + region_code: str = proto.Field( + proto.STRING, + number=1, + ) + lat_lng: latlng_pb2.LatLng = proto.Field( + proto.MESSAGE, + number=2, + message=latlng_pb2.LatLng, + ) + radius: float = proto.Field( + proto.DOUBLE, + number=3, + ) + radius_units: "Region.RadiusArea.RadiusUnits" = proto.Field( + proto.ENUM, + number=4, + enum="Region.RadiusArea.RadiusUnits", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -425,6 +481,11 @@ class GeoTargetArea(proto.Message): number=6, message=wrappers_pb2.BoolValue, ) + radius_area: RadiusArea = proto.Field( + proto.MESSAGE, + number=7, + message=RadiusArea, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1/test_regions_service.py b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1/test_regions_service.py index 29ac5fb0c965..9cb68d4e838e 100644 --- a/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1/test_regions_service.py +++ b/packages/google-shopping-merchant-accounts/tests/unit/gapic/merchant_accounts_v1/test_regions_service.py @@ -53,6 +53,7 @@ from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import wrappers_pb2 # type: ignore +from google.type import latlng_pb2 # type: ignore from google.shopping.merchant_accounts_v1.services.regions_service import ( RegionsServiceAsyncClient, @@ -5738,6 +5739,12 @@ def test_create_region_rest_call_success(request_type): "geotarget_area": {"geotarget_criteria_ids": [2324, 2325]}, "regional_inventory_eligible": {"value": True}, "shipping_eligible": {}, + "radius_area": { + "region_code": "region_code_value", + "lat_lng": {"latitude": 0.86, "longitude": 0.971}, + "radius": 0.648, + "radius_units": 1, + }, } # The version of a generated dependency at test runtime may differ from the version 
used during generation. # Delete any fields which are not present in the current runtime dependency @@ -6068,6 +6075,12 @@ def test_update_region_rest_call_success(request_type): "geotarget_area": {"geotarget_criteria_ids": [2324, 2325]}, "regional_inventory_eligible": {"value": True}, "shipping_eligible": {}, + "radius_area": { + "region_code": "region_code_value", + "lat_lng": {"latitude": 0.86, "longitude": 0.971}, + "radius": 0.648, + "radius_units": 1, + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency