diff --git a/.librarian/state.yaml b/.librarian/state.yaml index 1a0cd56fbc2c..36565ba07a73 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -40,7 +40,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-ads-marketingplatform-admin version: 0.2.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/marketingplatform/admin/v1alpha service_config: marketingplatformadmin_v1alpha.yaml @@ -623,7 +623,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-backupdr version: 0.3.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/cloud/backupdr/v1 service_config: backupdr_v1.yaml @@ -796,7 +796,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-analyticshub version: 0.5.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/cloud/bigquery/analyticshub/v1 service_config: analyticshub_v1.yaml @@ -1201,7 +1201,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-cloudsecuritycompliance version: 0.3.0 - last_generated_commit: 72e7439c8e7e9986cf1865e337fc7c64ca5bda1f + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/cloud/cloudsecuritycompliance/v1 service_config: cloudsecuritycompliance_v1.yaml @@ -1576,7 +1576,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dataplex version: 2.14.0 - last_generated_commit: e8365a7f88fabe8717cb8322b8ce784b03b6daea + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/cloud/dataplex/v1 service_config: dataplex_v1.yaml @@ -2719,7 +2719,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-management version: 1.29.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/cloud/networkmanagement/v1 service_config: networkmanagement_v1.yaml @@ -2761,7 +2761,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-services version: 0.6.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/cloud/networkservices/v1 service_config: networkservices_v1.yaml @@ -3844,7 +3844,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-translate version: 3.22.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/cloud/translate/v3beta1 service_config: translate_v3beta1.yaml @@ -4404,7 +4404,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-datasources version: 1.1.0 - last_generated_commit: 3322511885371d2b2253f209ccc3aa60d4100cfd + last_generated_commit: 53f97391f3451398f7b53c7f86dabd325d205677 apis: - path: google/shopping/merchant/datasources/v1 service_config: merchantapi_v1.yaml diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py index 4da1f9d4d206..30d9d0dcb7a8 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py +++ 
b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin/__init__.py @@ -25,17 +25,24 @@ MarketingplatformAdminServiceClient, ) from google.ads.marketingplatform_admin_v1alpha.types.marketingplatform_admin import ( - AnalyticsServiceLevel, CreateAnalyticsAccountLinkRequest, DeleteAnalyticsAccountLinkRequest, + FindSalesPartnerManagedClientsRequest, + FindSalesPartnerManagedClientsResponse, GetOrganizationRequest, ListAnalyticsAccountLinksRequest, ListAnalyticsAccountLinksResponse, + ListOrganizationsRequest, + ListOrganizationsResponse, + ReportPropertyUsageRequest, + ReportPropertyUsageResponse, SetPropertyServiceLevelRequest, SetPropertyServiceLevelResponse, ) from google.ads.marketingplatform_admin_v1alpha.types.resources import ( AnalyticsAccountLink, + AnalyticsPropertyType, + AnalyticsServiceLevel, LinkVerificationState, Organization, ) @@ -45,13 +52,20 @@ "MarketingplatformAdminServiceAsyncClient", "CreateAnalyticsAccountLinkRequest", "DeleteAnalyticsAccountLinkRequest", + "FindSalesPartnerManagedClientsRequest", + "FindSalesPartnerManagedClientsResponse", "GetOrganizationRequest", "ListAnalyticsAccountLinksRequest", "ListAnalyticsAccountLinksResponse", + "ListOrganizationsRequest", + "ListOrganizationsResponse", + "ReportPropertyUsageRequest", + "ReportPropertyUsageResponse", "SetPropertyServiceLevelRequest", "SetPropertyServiceLevelResponse", - "AnalyticsServiceLevel", "AnalyticsAccountLink", "Organization", + "AnalyticsPropertyType", + "AnalyticsServiceLevel", "LinkVerificationState", ) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py index dec3156333f7..f423719e2827 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/__init__.py @@ -23,29 +23,47 @@ MarketingplatformAdminServiceClient, ) from .types.marketingplatform_admin import ( - AnalyticsServiceLevel, CreateAnalyticsAccountLinkRequest, DeleteAnalyticsAccountLinkRequest, + FindSalesPartnerManagedClientsRequest, + FindSalesPartnerManagedClientsResponse, GetOrganizationRequest, ListAnalyticsAccountLinksRequest, ListAnalyticsAccountLinksResponse, + ListOrganizationsRequest, + ListOrganizationsResponse, + ReportPropertyUsageRequest, + ReportPropertyUsageResponse, SetPropertyServiceLevelRequest, SetPropertyServiceLevelResponse, ) -from .types.resources import AnalyticsAccountLink, LinkVerificationState, Organization +from .types.resources import ( + AnalyticsAccountLink, + AnalyticsPropertyType, + AnalyticsServiceLevel, + LinkVerificationState, + Organization, +) __all__ = ( "MarketingplatformAdminServiceAsyncClient", "AnalyticsAccountLink", + "AnalyticsPropertyType", "AnalyticsServiceLevel", "CreateAnalyticsAccountLinkRequest", "DeleteAnalyticsAccountLinkRequest", + "FindSalesPartnerManagedClientsRequest", + "FindSalesPartnerManagedClientsResponse", "GetOrganizationRequest", "LinkVerificationState", "ListAnalyticsAccountLinksRequest", "ListAnalyticsAccountLinksResponse", + "ListOrganizationsRequest", + "ListOrganizationsResponse", "MarketingplatformAdminServiceClient", "Organization", + "ReportPropertyUsageRequest", + "ReportPropertyUsageResponse", "SetPropertyServiceLevelRequest", "SetPropertyServiceLevelResponse", ) diff --git 
a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json index 8d346e91ed67..7378e8a813a2 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/gapic_metadata.json @@ -20,6 +20,11 @@ "delete_analytics_account_link" ] }, + "FindSalesPartnerManagedClients": { + "methods": [ + "find_sales_partner_managed_clients" + ] + }, "GetOrganization": { "methods": [ "get_organization" @@ -30,6 +35,16 @@ "list_analytics_account_links" ] }, + "ListOrganizations": { + "methods": [ + "list_organizations" + ] + }, + "ReportPropertyUsage": { + "methods": [ + "report_property_usage" + ] + }, "SetPropertyServiceLevel": { "methods": [ "set_property_service_level" @@ -50,6 +65,11 @@ "delete_analytics_account_link" ] }, + "FindSalesPartnerManagedClients": { + "methods": [ + "find_sales_partner_managed_clients" + ] + }, "GetOrganization": { "methods": [ "get_organization" @@ -60,6 +80,16 @@ "list_analytics_account_links" ] }, + "ListOrganizations": { + "methods": [ + "list_organizations" + ] + }, + "ReportPropertyUsage": { + "methods": [ + "report_property_usage" + ] + }, "SetPropertyServiceLevel": { "methods": [ "set_property_service_level" @@ -80,6 +110,11 @@ "delete_analytics_account_link" ] }, + "FindSalesPartnerManagedClients": { + "methods": [ + "find_sales_partner_managed_clients" + ] + }, "GetOrganization": { "methods": [ "get_organization" @@ -90,6 +125,16 @@ "list_analytics_account_links" ] }, + "ListOrganizations": { + "methods": [ + "list_organizations" + ] + }, + "ReportPropertyUsage": { + "methods": [ + "report_property_usage" + ] + }, "SetPropertyServiceLevel": { "methods": [ "set_property_service_level" diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py index a8879c58fff9..1416ba2380b8 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/async_client.py @@ -437,6 +437,200 @@ async def sample_get_organization(): # Done; return the response. return response + async def list_organizations( + self, + request: Optional[ + Union[marketingplatform_admin.ListOrganizationsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListOrganizationsAsyncPager: + r"""Returns a list of organizations that the user has + access to. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_list_organizations(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListOrganizationsRequest( + ) + + # Make the request + page_result = client.list_organizations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsRequest, dict]]): + The request object. Request message for ListOrganizations + RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListOrganizationsAsyncPager: + Response message for + ListOrganizations RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.ListOrganizationsRequest): + request = marketingplatform_admin.ListOrganizationsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_organizations + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListOrganizationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def find_sales_partner_managed_clients( + self, + request: Optional[ + Union[marketingplatform_admin.FindSalesPartnerManagedClientsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> marketingplatform_admin.FindSalesPartnerManagedClientsResponse: + r"""Returns a list of clients managed by the sales + partner organization. + User needs to be an OrgAdmin/BillingAdmin on the sales + partner organization in order to view the end clients. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_find_sales_partner_managed_clients(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.FindSalesPartnerManagedClientsRequest( + organization="organization_value", + ) + + # Make the request + response = await client.find_sales_partner_managed_clients(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsRequest, dict]]): + The request object. Request message for + FindSalesPartnerManagedClients RPC. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsResponse: + Response message for + FindSalesPartnerManagedClients RPC. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, marketingplatform_admin.FindSalesPartnerManagedClientsRequest + ): + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.find_sales_partner_managed_clients + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("organization", request.organization),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_analytics_account_links( self, request: Optional[ @@ -937,6 +1131,135 @@ async def sample_set_property_service_level(): # Done; return the response. return response + async def report_property_usage( + self, + request: Optional[ + Union[marketingplatform_admin.ReportPropertyUsageRequest, dict] + ] = None, + *, + organization: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> marketingplatform_admin.ReportPropertyUsageResponse: + r"""Get the usage and billing data for properties within + the organization for the specified month. + + Per direct client org, user needs to be + OrgAdmin/BillingAdmin on the organization in order to + view the billing and usage data. 
+ + Per sales partner client org, user needs to be + OrgAdmin/BillingAdmin on the sales partner org in order + to view the billing and usage data, or + OrgAdmin/BillingAdmin on the sales partner client org in + order to view the usage data only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + async def sample_report_property_usage(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ReportPropertyUsageRequest( + organization="organization_value", + month="month_value", + ) + + # Make the request + response = await client.report_property_usage(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageRequest, dict]]): + The request object. Request message for + ReportPropertyUsage RPC. + organization (:class:`str`): + Required. Specifies the organization whose property + usage will be listed. + + Format: organizations/{org_id} + + This corresponds to the ``organization`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageResponse: + Response message for + ReportPropertyUsage RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [organization] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.ReportPropertyUsageRequest): + request = marketingplatform_admin.ReportPropertyUsageRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if organization is not None: + request.organization = organization + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.report_property_usage + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("organization", request.organization),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self) -> "MarketingplatformAdminServiceAsyncClient": return self diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py index fdeee05657d8..b2f9bbfd003b 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/client.py @@ -889,6 +889,198 @@ def sample_get_organization(): # Done; return the response. return response + def list_organizations( + self, + request: Optional[ + Union[marketingplatform_admin.ListOrganizationsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListOrganizationsPager: + r"""Returns a list of organizations that the user has + access to. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_list_organizations(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListOrganizationsRequest( + ) + + # Make the request + page_result = client.list_organizations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsRequest, dict]): + The request object. Request message for ListOrganizations + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListOrganizationsPager: + Response message for + ListOrganizations RPC. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, marketingplatform_admin.ListOrganizationsRequest): + request = marketingplatform_admin.ListOrganizationsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_organizations] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOrganizationsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def find_sales_partner_managed_clients( + self, + request: Optional[ + Union[marketingplatform_admin.FindSalesPartnerManagedClientsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> marketingplatform_admin.FindSalesPartnerManagedClientsResponse: + r"""Returns a list of clients managed by the sales + partner organization. + User needs to be an OrgAdmin/BillingAdmin on the sales + partner organization in order to view the end clients. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_find_sales_partner_managed_clients(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.FindSalesPartnerManagedClientsRequest( + organization="organization_value", + ) + + # Make the request + response = client.find_sales_partner_managed_clients(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsRequest, dict]): + The request object. Request message for + FindSalesPartnerManagedClients RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsResponse: + Response message for + FindSalesPartnerManagedClients RPC. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, marketingplatform_admin.FindSalesPartnerManagedClientsRequest + ): + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest( + request + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.find_sales_partner_managed_clients + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("organization", request.organization),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def list_analytics_account_links( self, request: Optional[ @@ -1385,6 +1577,132 @@ def sample_set_property_service_level(): # Done; return the response. return response + def report_property_usage( + self, + request: Optional[ + Union[marketingplatform_admin.ReportPropertyUsageRequest, dict] + ] = None, + *, + organization: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> marketingplatform_admin.ReportPropertyUsageResponse: + r"""Get the usage and billing data for properties within + the organization for the specified month. + + Per direct client org, user needs to be + OrgAdmin/BillingAdmin on the organization in order to + view the billing and usage data. + + Per sales partner client org, user needs to be + OrgAdmin/BillingAdmin on the sales partner org in order + to view the billing and usage data, or + OrgAdmin/BillingAdmin on the sales partner client org in + order to view the usage data only. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.ads import marketingplatform_admin_v1alpha + + def sample_report_property_usage(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ReportPropertyUsageRequest( + organization="organization_value", + month="month_value", + ) + + # Make the request + response = client.report_property_usage(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageRequest, dict]): + The request object. Request message for + ReportPropertyUsage RPC. + organization (str): + Required. Specifies the organization whose property + usage will be listed. + + Format: organizations/{org_id} + + This corresponds to the ``organization`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageResponse: + Response message for + ReportPropertyUsage RPC. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [organization] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, marketingplatform_admin.ReportPropertyUsageRequest): + request = marketingplatform_admin.ReportPropertyUsageRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if organization is not None: + request.organization = organization + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.report_property_usage] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("organization", request.organization),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "MarketingplatformAdminServiceClient": return self diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py index 0436ed0b0d73..f31c78893a3a 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/pagers.py @@ -44,6 +44,166 @@ ) +class ListOrganizationsPager: + """A pager for iterating through ``list_organizations`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``organizations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOrganizations`` requests and continue to iterate + through the ``organizations`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., marketingplatform_admin.ListOrganizationsResponse], + request: marketingplatform_admin.ListOrganizationsRequest, + response: marketingplatform_admin.ListOrganizationsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = marketingplatform_admin.ListOrganizationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[marketingplatform_admin.ListOrganizationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[resources.Organization]: + for page in self.pages: + yield from page.organizations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListOrganizationsAsyncPager: + """A pager for iterating through ``list_organizations`` requests. + + This class thinly wraps an initial + :class:`google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``organizations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOrganizations`` requests and continue to iterate + through the ``organizations`` field on the + corresponding responses. + + All the usual :class:`google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[marketingplatform_admin.ListOrganizationsResponse] + ], + request: marketingplatform_admin.ListOrganizationsRequest, + response: marketingplatform_admin.ListOrganizationsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsRequest): + The initial request object. + response (google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = marketingplatform_admin.ListOrganizationsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[marketingplatform_admin.ListOrganizationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[resources.Organization]: + async def async_generator(): + async for page in self.pages: + for response in page.organizations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListAnalyticsAccountLinksPager: """A pager for iterating through ``list_analytics_account_links`` requests. 
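Below the pagers, a minimal consumption sketch of the new ListOrganizations paging surface (not generated code, just an illustration): it uses only names introduced in this change — the v1alpha client, ``ListOrganizationsRequest``, and the pager's ``pages`` / ``organizations`` / ``next_page_token`` handling. The async client mirrors this with ``async for`` over ``ListOrganizationsAsyncPager``.

.. code-block:: python

    from google.ads import marketingplatform_admin_v1alpha

    def print_all_organizations():
        # The generated client resolves credentials from the environment
        # (Application Default Credentials), as with other GAPIC clients.
        client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient()

        request = marketingplatform_admin_v1alpha.ListOrganizationsRequest()

        # Iterating the returned ListOrganizationsPager yields Organization
        # messages and fetches additional pages transparently.
        for organization in client.list_organizations(request=request):
            print(organization)

        # Page-level access is also available: the pager's `pages` property
        # re-issues the request with `page_token` until `next_page_token`
        # is empty.
        for page in client.list_organizations(request=request).pages:
            print(len(page.organizations))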
diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py index a0fa7888d09d..a4edc5f2ed0b 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/base.py @@ -145,6 +145,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_organizations: gapic_v1.method.wrap_method( + self.list_organizations, + default_timeout=None, + client_info=client_info, + ), + self.find_sales_partner_managed_clients: gapic_v1.method.wrap_method( + self.find_sales_partner_managed_clients, + default_timeout=None, + client_info=client_info, + ), self.list_analytics_account_links: gapic_v1.method.wrap_method( self.list_analytics_account_links, default_timeout=None, @@ -165,6 +175,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.report_property_usage: gapic_v1.method.wrap_method( + self.report_property_usage, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -185,6 +200,30 @@ def get_organization( ]: raise NotImplementedError() + @property + def list_organizations( + self, + ) -> Callable[ + [marketingplatform_admin.ListOrganizationsRequest], + Union[ + marketingplatform_admin.ListOrganizationsResponse, + Awaitable[marketingplatform_admin.ListOrganizationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def find_sales_partner_managed_clients( + self, + ) -> Callable[ + [marketingplatform_admin.FindSalesPartnerManagedClientsRequest], + Union[ + marketingplatform_admin.FindSalesPartnerManagedClientsResponse, + Awaitable[marketingplatform_admin.FindSalesPartnerManagedClientsResponse], + ], + ]: + raise NotImplementedError() + @property def list_analytics_account_links( self, @@ -229,6 +268,18 @@ def set_property_service_level( ]: raise NotImplementedError() + @property + def report_property_usage( + self, + ) -> Callable[ + [marketingplatform_admin.ReportPropertyUsageRequest], + Union[ + marketingplatform_admin.ReportPropertyUsageResponse, + Awaitable[marketingplatform_admin.ReportPropertyUsageResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py index 97c4e79c239b..571481963ff7 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc.py @@ -356,6 +356,71 @@ def get_organization( ) return self._stubs["get_organization"] + @property + def list_organizations( + self, + ) -> Callable[ + [marketingplatform_admin.ListOrganizationsRequest], + 
marketingplatform_admin.ListOrganizationsResponse, + ]: + r"""Return a callable for the list organizations method over gRPC. + + Returns a list of organizations that the user has + access to. + + Returns: + Callable[[~.ListOrganizationsRequest], + ~.ListOrganizationsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_organizations" not in self._stubs: + self._stubs["list_organizations"] = self._logged_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListOrganizations", + request_serializer=marketingplatform_admin.ListOrganizationsRequest.serialize, + response_deserializer=marketingplatform_admin.ListOrganizationsResponse.deserialize, + ) + return self._stubs["list_organizations"] + + @property + def find_sales_partner_managed_clients( + self, + ) -> Callable[ + [marketingplatform_admin.FindSalesPartnerManagedClientsRequest], + marketingplatform_admin.FindSalesPartnerManagedClientsResponse, + ]: + r"""Return a callable for the find sales partner managed + clients method over gRPC. + + Returns a list of clients managed by the sales + partner organization. + User needs to be an OrgAdmin/BillingAdmin on the sales + partner organization in order to view the end clients. + + Returns: + Callable[[~.FindSalesPartnerManagedClientsRequest], + ~.FindSalesPartnerManagedClientsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "find_sales_partner_managed_clients" not in self._stubs: + self._stubs[ + "find_sales_partner_managed_clients" + ] = self._logged_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/FindSalesPartnerManagedClients", + request_serializer=marketingplatform_admin.FindSalesPartnerManagedClientsRequest.serialize, + response_deserializer=marketingplatform_admin.FindSalesPartnerManagedClientsResponse.deserialize, + ) + return self._stubs["find_sales_partner_managed_clients"] + @property def list_analytics_account_links( self, @@ -492,6 +557,46 @@ def set_property_service_level( ) return self._stubs["set_property_service_level"] + @property + def report_property_usage( + self, + ) -> Callable[ + [marketingplatform_admin.ReportPropertyUsageRequest], + marketingplatform_admin.ReportPropertyUsageResponse, + ]: + r"""Return a callable for the report property usage method over gRPC. + + Get the usage and billing data for properties within + the organization for the specified month. + + Per direct client org, user needs to be + OrgAdmin/BillingAdmin on the organization in order to + view the billing and usage data. + + Per sales partner client org, user needs to be + OrgAdmin/BillingAdmin on the sales partner org in order + to view the billing and usage data, or + OrgAdmin/BillingAdmin on the sales partner client org in + order to view the usage data only. + + Returns: + Callable[[~.ReportPropertyUsageRequest], + ~.ReportPropertyUsageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "report_property_usage" not in self._stubs: + self._stubs["report_property_usage"] = self._logged_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ReportPropertyUsage", + request_serializer=marketingplatform_admin.ReportPropertyUsageRequest.serialize, + response_deserializer=marketingplatform_admin.ReportPropertyUsageResponse.deserialize, + ) + return self._stubs["report_property_usage"] + def close(self): self._logged_channel.close() diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py index c7e1801bf6eb..1a99d9895164 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/grpc_asyncio.py @@ -365,6 +365,71 @@ def get_organization( ) return self._stubs["get_organization"] + @property + def list_organizations( + self, + ) -> Callable[ + [marketingplatform_admin.ListOrganizationsRequest], + Awaitable[marketingplatform_admin.ListOrganizationsResponse], + ]: + r"""Return a callable for the list organizations method over gRPC. + + Returns a list of organizations that the user has + access to. + + Returns: + Callable[[~.ListOrganizationsRequest], + Awaitable[~.ListOrganizationsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_organizations" not in self._stubs: + self._stubs["list_organizations"] = self._logged_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ListOrganizations", + request_serializer=marketingplatform_admin.ListOrganizationsRequest.serialize, + response_deserializer=marketingplatform_admin.ListOrganizationsResponse.deserialize, + ) + return self._stubs["list_organizations"] + + @property + def find_sales_partner_managed_clients( + self, + ) -> Callable[ + [marketingplatform_admin.FindSalesPartnerManagedClientsRequest], + Awaitable[marketingplatform_admin.FindSalesPartnerManagedClientsResponse], + ]: + r"""Return a callable for the find sales partner managed + clients method over gRPC. + + Returns a list of clients managed by the sales + partner organization. + User needs to be an OrgAdmin/BillingAdmin on the sales + partner organization in order to view the end clients. + + Returns: + Callable[[~.FindSalesPartnerManagedClientsRequest], + Awaitable[~.FindSalesPartnerManagedClientsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "find_sales_partner_managed_clients" not in self._stubs: + self._stubs[ + "find_sales_partner_managed_clients" + ] = self._logged_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/FindSalesPartnerManagedClients", + request_serializer=marketingplatform_admin.FindSalesPartnerManagedClientsRequest.serialize, + response_deserializer=marketingplatform_admin.FindSalesPartnerManagedClientsResponse.deserialize, + ) + return self._stubs["find_sales_partner_managed_clients"] + @property def list_analytics_account_links( self, @@ -502,6 +567,46 @@ def set_property_service_level( ) return self._stubs["set_property_service_level"] + @property + def report_property_usage( + self, + ) -> Callable[ + [marketingplatform_admin.ReportPropertyUsageRequest], + Awaitable[marketingplatform_admin.ReportPropertyUsageResponse], + ]: + r"""Return a callable for the report property usage method over gRPC. + + Get the usage and billing data for properties within + the organization for the specified month. + + Per direct client org, user needs to be + OrgAdmin/BillingAdmin on the organization in order to + view the billing and usage data. + + Per sales partner client org, user needs to be + OrgAdmin/BillingAdmin on the sales partner org in order + to view the billing and usage data, or + OrgAdmin/BillingAdmin on the sales partner client org in + order to view the usage data only. + + Returns: + Callable[[~.ReportPropertyUsageRequest], + Awaitable[~.ReportPropertyUsageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "report_property_usage" not in self._stubs: + self._stubs["report_property_usage"] = self._logged_channel.unary_unary( + "/google.marketingplatform.admin.v1alpha.MarketingplatformAdminService/ReportPropertyUsage", + request_serializer=marketingplatform_admin.ReportPropertyUsageRequest.serialize, + response_deserializer=marketingplatform_admin.ReportPropertyUsageResponse.deserialize, + ) + return self._stubs["report_property_usage"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -510,6 +615,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_organizations: self._wrap_method( + self.list_organizations, + default_timeout=None, + client_info=client_info, + ), + self.find_sales_partner_managed_clients: self._wrap_method( + self.find_sales_partner_managed_clients, + default_timeout=None, + client_info=client_info, + ), self.list_analytics_account_links: self._wrap_method( self.list_analytics_account_links, default_timeout=None, @@ -530,6 +645,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.report_property_usage: self._wrap_method( + self.report_property_usage, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py index 220347cfc789..81e0d3d51106 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest.py @@ -88,6 +88,14 @@ def pre_delete_analytics_account_link(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_find_sales_partner_managed_clients(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_find_sales_partner_managed_clients(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_organization(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +112,22 @@ def post_list_analytics_account_links(self, response): logging.log(f"Received response: {response}") return response + def pre_list_organizations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_organizations(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_report_property_usage(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_report_property_usage(self, response): + logging.log(f"Received response: {response}") + return response + def pre_set_property_service_level(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -182,6 +206,58 @@ def pre_delete_analytics_account_link( """ return request, metadata + def pre_find_sales_partner_managed_clients( + self, + request: 
marketingplatform_admin.FindSalesPartnerManagedClientsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.FindSalesPartnerManagedClientsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for find_sales_partner_managed_clients + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_find_sales_partner_managed_clients( + self, response: marketingplatform_admin.FindSalesPartnerManagedClientsResponse + ) -> marketingplatform_admin.FindSalesPartnerManagedClientsResponse: + """Post-rpc interceptor for find_sales_partner_managed_clients + + DEPRECATED. Please use the `post_find_sales_partner_managed_clients_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. This `post_find_sales_partner_managed_clients` interceptor runs + before the `post_find_sales_partner_managed_clients_with_metadata` interceptor. + """ + return response + + def post_find_sales_partner_managed_clients_with_metadata( + self, + response: marketingplatform_admin.FindSalesPartnerManagedClientsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.FindSalesPartnerManagedClientsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for find_sales_partner_managed_clients + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MarketingplatformAdminService server but before it is returned to user code. + + We recommend only using this `post_find_sales_partner_managed_clients_with_metadata` + interceptor in new development instead of the `post_find_sales_partner_managed_clients` interceptor. + When both interceptors are used, this `post_find_sales_partner_managed_clients_with_metadata` interceptor runs after the + `post_find_sales_partner_managed_clients` interceptor. The (possibly modified) response returned by + `post_find_sales_partner_managed_clients` will be passed to + `post_find_sales_partner_managed_clients_with_metadata`. + """ + return response, metadata + def pre_get_organization( self, request: marketingplatform_admin.GetOrganizationRequest, @@ -283,6 +359,110 @@ def post_list_analytics_account_links_with_metadata( """ return response, metadata + def pre_list_organizations( + self, + request: marketingplatform_admin.ListOrganizationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.ListOrganizationsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_organizations + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_list_organizations( + self, response: marketingplatform_admin.ListOrganizationsResponse + ) -> marketingplatform_admin.ListOrganizationsResponse: + """Post-rpc interceptor for list_organizations + + DEPRECATED. Please use the `post_list_organizations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. 
This `post_list_organizations` interceptor runs + before the `post_list_organizations_with_metadata` interceptor. + """ + return response + + def post_list_organizations_with_metadata( + self, + response: marketingplatform_admin.ListOrganizationsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.ListOrganizationsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_organizations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MarketingplatformAdminService server but before it is returned to user code. + + We recommend only using this `post_list_organizations_with_metadata` + interceptor in new development instead of the `post_list_organizations` interceptor. + When both interceptors are used, this `post_list_organizations_with_metadata` interceptor runs after the + `post_list_organizations` interceptor. The (possibly modified) response returned by + `post_list_organizations` will be passed to + `post_list_organizations_with_metadata`. + """ + return response, metadata + + def pre_report_property_usage( + self, + request: marketingplatform_admin.ReportPropertyUsageRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.ReportPropertyUsageRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for report_property_usage + + Override in a subclass to manipulate the request or metadata + before they are sent to the MarketingplatformAdminService server. + """ + return request, metadata + + def post_report_property_usage( + self, response: marketingplatform_admin.ReportPropertyUsageResponse + ) -> marketingplatform_admin.ReportPropertyUsageResponse: + """Post-rpc interceptor for report_property_usage + + DEPRECATED. Please use the `post_report_property_usage_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the MarketingplatformAdminService server but before + it is returned to user code. This `post_report_property_usage` interceptor runs + before the `post_report_property_usage_with_metadata` interceptor. + """ + return response + + def post_report_property_usage_with_metadata( + self, + response: marketingplatform_admin.ReportPropertyUsageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + marketingplatform_admin.ReportPropertyUsageResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for report_property_usage + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the MarketingplatformAdminService server but before it is returned to user code. + + We recommend only using this `post_report_property_usage_with_metadata` + interceptor in new development instead of the `post_report_property_usage` interceptor. + When both interceptors are used, this `post_report_property_usage_with_metadata` interceptor runs after the + `post_report_property_usage` interceptor. The (possibly modified) response returned by + `post_report_property_usage` will be passed to + `post_report_property_usage_with_metadata`. 
+ """ + return response, metadata + def pre_set_property_service_level( self, request: marketingplatform_admin.SetPropertyServiceLevelRequest, @@ -705,6 +885,175 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _FindSalesPartnerManagedClients( + _BaseMarketingplatformAdminServiceRestTransport._BaseFindSalesPartnerManagedClients, + MarketingplatformAdminServiceRestStub, + ): + def __hash__(self): + return hash( + "MarketingplatformAdminServiceRestTransport.FindSalesPartnerManagedClients" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: marketingplatform_admin.FindSalesPartnerManagedClientsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> marketingplatform_admin.FindSalesPartnerManagedClientsResponse: + r"""Call the find sales partner + managed clients method over HTTP. + + Args: + request (~.marketingplatform_admin.FindSalesPartnerManagedClientsRequest): + The request object. Request message for + FindSalesPartnerManagedClients RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.marketingplatform_admin.FindSalesPartnerManagedClientsResponse: + Response message for + FindSalesPartnerManagedClients RPC. 
+ + """ + + http_options = ( + _BaseMarketingplatformAdminServiceRestTransport._BaseFindSalesPartnerManagedClients._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_find_sales_partner_managed_clients( + request, metadata + ) + transcoded_request = _BaseMarketingplatformAdminServiceRestTransport._BaseFindSalesPartnerManagedClients._get_transcoded_request( + http_options, request + ) + + body = _BaseMarketingplatformAdminServiceRestTransport._BaseFindSalesPartnerManagedClients._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMarketingplatformAdminServiceRestTransport._BaseFindSalesPartnerManagedClients._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.marketingplatform.admin_v1alpha.MarketingplatformAdminServiceClient.FindSalesPartnerManagedClients", + extra={ + "serviceName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "rpcName": "FindSalesPartnerManagedClients", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MarketingplatformAdminServiceRestTransport._FindSalesPartnerManagedClients._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.FindSalesPartnerManagedClientsResponse() + pb_resp = marketingplatform_admin.FindSalesPartnerManagedClientsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_find_sales_partner_managed_clients(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_find_sales_partner_managed_clients_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = marketingplatform_admin.FindSalesPartnerManagedClientsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.marketingplatform.admin_v1alpha.MarketingplatformAdminServiceClient.find_sales_partner_managed_clients", + extra={ + "serviceName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "rpcName": "FindSalesPartnerManagedClients", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetOrganization( _BaseMarketingplatformAdminServiceRestTransport._BaseGetOrganization, MarketingplatformAdminServiceRestStub, @@ -1010,13 +1359,329 @@ def __call__( ) return resp - class _SetPropertyServiceLevel( - _BaseMarketingplatformAdminServiceRestTransport._BaseSetPropertyServiceLevel, + class _ListOrganizations( + _BaseMarketingplatformAdminServiceRestTransport._BaseListOrganizations, MarketingplatformAdminServiceRestStub, ): def __hash__(self): - return hash( - "MarketingplatformAdminServiceRestTransport.SetPropertyServiceLevel" + return hash("MarketingplatformAdminServiceRestTransport.ListOrganizations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: marketingplatform_admin.ListOrganizationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> marketingplatform_admin.ListOrganizationsResponse: + r"""Call the list organizations method over HTTP. + + Args: + request (~.marketingplatform_admin.ListOrganizationsRequest): + The request object. Request message for ListOrganizations + RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.marketingplatform_admin.ListOrganizationsResponse: + Response message for + ListOrganizations RPC. 
+ + """ + + http_options = ( + _BaseMarketingplatformAdminServiceRestTransport._BaseListOrganizations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_organizations( + request, metadata + ) + transcoded_request = _BaseMarketingplatformAdminServiceRestTransport._BaseListOrganizations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMarketingplatformAdminServiceRestTransport._BaseListOrganizations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.marketingplatform.admin_v1alpha.MarketingplatformAdminServiceClient.ListOrganizations", + extra={ + "serviceName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "rpcName": "ListOrganizations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MarketingplatformAdminServiceRestTransport._ListOrganizations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.ListOrganizationsResponse() + pb_resp = marketingplatform_admin.ListOrganizationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_organizations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_organizations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + marketingplatform_admin.ListOrganizationsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.marketingplatform.admin_v1alpha.MarketingplatformAdminServiceClient.list_organizations", + extra={ + "serviceName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "rpcName": "ListOrganizations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ReportPropertyUsage( + _BaseMarketingplatformAdminServiceRestTransport._BaseReportPropertyUsage, + MarketingplatformAdminServiceRestStub, + ): + def __hash__(self): + return hash( + "MarketingplatformAdminServiceRestTransport.ReportPropertyUsage" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + 
"{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: marketingplatform_admin.ReportPropertyUsageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> marketingplatform_admin.ReportPropertyUsageResponse: + r"""Call the report property usage method over HTTP. + + Args: + request (~.marketingplatform_admin.ReportPropertyUsageRequest): + The request object. Request message for + ReportPropertyUsage RPC. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.marketingplatform_admin.ReportPropertyUsageResponse: + Response message for + ReportPropertyUsage RPC. + + """ + + http_options = ( + _BaseMarketingplatformAdminServiceRestTransport._BaseReportPropertyUsage._get_http_options() + ) + + request, metadata = self._interceptor.pre_report_property_usage( + request, metadata + ) + transcoded_request = _BaseMarketingplatformAdminServiceRestTransport._BaseReportPropertyUsage._get_transcoded_request( + http_options, request + ) + + body = _BaseMarketingplatformAdminServiceRestTransport._BaseReportPropertyUsage._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMarketingplatformAdminServiceRestTransport._BaseReportPropertyUsage._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.marketingplatform.admin_v1alpha.MarketingplatformAdminServiceClient.ReportPropertyUsage", + extra={ + "serviceName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "rpcName": "ReportPropertyUsage", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MarketingplatformAdminServiceRestTransport._ReportPropertyUsage._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = marketingplatform_admin.ReportPropertyUsageResponse() + pb_resp = marketingplatform_admin.ReportPropertyUsageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_report_property_usage(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_report_property_usage_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + marketingplatform_admin.ReportPropertyUsageResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.marketingplatform.admin_v1alpha.MarketingplatformAdminServiceClient.report_property_usage", + extra={ + "serviceName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "rpcName": "ReportPropertyUsage", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetPropertyServiceLevel( + _BaseMarketingplatformAdminServiceRestTransport._BaseSetPropertyServiceLevel, + MarketingplatformAdminServiceRestStub, + ): + def __hash__(self): + return hash( + "MarketingplatformAdminServiceRestTransport.SetPropertyServiceLevel" ) @staticmethod @@ -1194,6 +1859,17 @@ def delete_analytics_account_link( # In C++ this would require a dynamic_cast return self._DeleteAnalyticsAccountLink(self._session, self._host, self._interceptor) # type: ignore + @property + def find_sales_partner_managed_clients( + self, + ) -> Callable[ + [marketingplatform_admin.FindSalesPartnerManagedClientsRequest], + marketingplatform_admin.FindSalesPartnerManagedClientsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FindSalesPartnerManagedClients(self._session, self._host, self._interceptor) # type: ignore + @property def get_organization( self, @@ -1215,6 +1891,28 @@ def list_analytics_account_links( # In C++ this would require a dynamic_cast return self._ListAnalyticsAccountLinks(self._session, self._host, self._interceptor) # type: ignore + @property + def list_organizations( + self, + ) -> Callable[ + [marketingplatform_admin.ListOrganizationsRequest], + marketingplatform_admin.ListOrganizationsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListOrganizations(self._session, self._host, self._interceptor) # type: ignore + + @property + def report_property_usage( + self, + ) -> Callable[ + [marketingplatform_admin.ReportPropertyUsageRequest], + marketingplatform_admin.ReportPropertyUsageResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ReportPropertyUsage(self._session, self._host, self._interceptor) # type: ignore + @property def set_property_service_level( self, diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest_base.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest_base.py index fd985ea5637d..07c7078500af 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest_base.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/services/marketingplatform_admin_service/transports/rest_base.py @@ -201,6 +201,67 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseFindSalesPartnerManagedClients: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{organization=organizations/*}:findSalesPartnerManagedClients", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + marketingplatform_admin.FindSalesPartnerManagedClientsRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMarketingplatformAdminServiceRestTransport._BaseFindSalesPartnerManagedClients._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetOrganization: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -297,6 +358,95 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListOrganizations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/organizations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = marketingplatform_admin.ListOrganizationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + 
use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseReportPropertyUsage: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{organization=organizations/*}:reportPropertyUsage", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = marketingplatform_admin.ReportPropertyUsageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMarketingplatformAdminServiceRestTransport._BaseReportPropertyUsage._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseSetPropertyServiceLevel: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py index 7404673eb608..5c08f7971e39 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/__init__.py @@ -14,27 +14,45 @@ # limitations under the License. 
# from .marketingplatform_admin import ( - AnalyticsServiceLevel, CreateAnalyticsAccountLinkRequest, DeleteAnalyticsAccountLinkRequest, + FindSalesPartnerManagedClientsRequest, + FindSalesPartnerManagedClientsResponse, GetOrganizationRequest, ListAnalyticsAccountLinksRequest, ListAnalyticsAccountLinksResponse, + ListOrganizationsRequest, + ListOrganizationsResponse, + ReportPropertyUsageRequest, + ReportPropertyUsageResponse, SetPropertyServiceLevelRequest, SetPropertyServiceLevelResponse, ) -from .resources import AnalyticsAccountLink, LinkVerificationState, Organization +from .resources import ( + AnalyticsAccountLink, + AnalyticsPropertyType, + AnalyticsServiceLevel, + LinkVerificationState, + Organization, +) __all__ = ( "CreateAnalyticsAccountLinkRequest", "DeleteAnalyticsAccountLinkRequest", + "FindSalesPartnerManagedClientsRequest", + "FindSalesPartnerManagedClientsResponse", "GetOrganizationRequest", "ListAnalyticsAccountLinksRequest", "ListAnalyticsAccountLinksResponse", + "ListOrganizationsRequest", + "ListOrganizationsResponse", + "ReportPropertyUsageRequest", + "ReportPropertyUsageResponse", "SetPropertyServiceLevelRequest", "SetPropertyServiceLevelResponse", - "AnalyticsServiceLevel", "AnalyticsAccountLink", "Organization", + "AnalyticsPropertyType", + "AnalyticsServiceLevel", "LinkVerificationState", ) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py index b704eb3ae105..ed54d076b297 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/marketingplatform_admin.py @@ -17,6 +17,8 @@ from typing import MutableMapping, MutableSequence +from google.type import date_pb2 # type: ignore +from google.type import money_pb2 # type: ignore import proto # type: ignore from google.ads.marketingplatform_admin_v1alpha.types import resources @@ -24,34 +26,23 @@ __protobuf__ = proto.module( package="google.marketingplatform.admin.v1alpha", manifest={ - "AnalyticsServiceLevel", "GetOrganizationRequest", + "ListOrganizationsRequest", + "ListOrganizationsResponse", + "FindSalesPartnerManagedClientsRequest", + "FindSalesPartnerManagedClientsResponse", "ListAnalyticsAccountLinksRequest", "ListAnalyticsAccountLinksResponse", "CreateAnalyticsAccountLinkRequest", "DeleteAnalyticsAccountLinkRequest", "SetPropertyServiceLevelRequest", "SetPropertyServiceLevelResponse", + "ReportPropertyUsageRequest", + "ReportPropertyUsageResponse", }, ) -class AnalyticsServiceLevel(proto.Enum): - r"""Various levels of service for Google Analytics. - - Values: - ANALYTICS_SERVICE_LEVEL_UNSPECIFIED (0): - Service level unspecified. - ANALYTICS_SERVICE_LEVEL_STANDARD (1): - The standard version of Google Analytics. - ANALYTICS_SERVICE_LEVEL_360 (2): - The premium version of Google Analytics. - """ - ANALYTICS_SERVICE_LEVEL_UNSPECIFIED = 0 - ANALYTICS_SERVICE_LEVEL_STANDARD = 1 - ANALYTICS_SERVICE_LEVEL_360 = 2 - - class GetOrganizationRequest(proto.Message): r"""Request message for GetOrganization RPC. @@ -67,6 +58,135 @@ class GetOrganizationRequest(proto.Message): ) +class ListOrganizationsRequest(proto.Message): + r"""Request message for ListOrganizations RPC. + + Attributes: + page_size (int): + Optional. 
The maximum number of organizations + to return in one call. The service may return + fewer than this value. + + If unspecified, at most 50 organizations will be + returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ListOrganizations call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListOrganizations`` must match the call that provided the + page token. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListOrganizationsResponse(proto.Message): + r"""Response message for ListOrganizations RPC. + + Attributes: + organizations (MutableSequence[google.ads.marketingplatform_admin_v1alpha.types.Organization]): + The Organization resource that the user has + access to, which includes the org id and display + name. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + organizations: MutableSequence[resources.Organization] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Organization, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class FindSalesPartnerManagedClientsRequest(proto.Message): + r"""Request message for FindSalesPartnerManagedClients RPC. + + Attributes: + organization (str): + Required. The name of the sales partner organization. + Format: organizations/{org_id} + is_active (bool): + Optional. If set, only active and just ended + clients will be returned. + """ + + organization: str = proto.Field( + proto.STRING, + number=1, + ) + is_active: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class FindSalesPartnerManagedClientsResponse(proto.Message): + r"""Response message for FindSalesPartnerManagedClients RPC. + + Attributes: + client_data (MutableSequence[google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsResponse.ClientData]): + The clients managed by the sales org. + """ + + class ClientData(proto.Message): + r"""Contains the client data. + + Attributes: + organization (google.ads.marketingplatform_admin_v1alpha.types.Organization): + The end client that has/had contract with the + requested sales org. + start_date (google.type.date_pb2.Date): + The start date of the contract between the + sales org and the end client. + end_date (google.type.date_pb2.Date): + The end date of the contract between the + sales org and the end client. + """ + + organization: resources.Organization = proto.Field( + proto.MESSAGE, + number=1, + message=resources.Organization, + ) + start_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=2, + message=date_pb2.Date, + ) + end_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=3, + message=date_pb2.Date, + ) + + client_data: MutableSequence[ClientData] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=ClientData, + ) + + class ListAnalyticsAccountLinksRequest(proto.Message): r"""Request message for ListAnalyticsAccountLinks RPC. 
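The new ListOrganizations and FindSalesPartnerManagedClients messages above pair with generated client methods of the same names. A minimal usage sketch follows (hand-written for illustration, not part of the generated diff); it assumes the synchronous client surface shown in the generated samples later in this change, where `list_organizations` returns an iterable pager and `find_sales_partner_managed_clients` returns its response directly:

from google.ads import marketingplatform_admin_v1alpha


def show_orgs_and_managed_clients(sales_org: str) -> None:
    # `sales_org` is a hypothetical value of the form "organizations/{org_id}".
    client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient()

    # Page through every organization the caller can access. page_size is
    # optional; per the request docstring, values above 1000 are coerced to
    # 1000, and the pager follows next_page_token across pages.
    list_request = marketingplatform_admin_v1alpha.ListOrganizationsRequest(
        page_size=50,
    )
    for organization in client.list_organizations(request=list_request):
        print(organization)

    # For a sales partner organization, list only clients whose contracts are
    # active (or just ended), using the new is_active filter.
    find_request = marketingplatform_admin_v1alpha.FindSalesPartnerManagedClientsRequest(
        organization=sales_org,
        is_active=True,
    )
    response = client.find_sales_partner_managed_clients(request=find_request)
    for client_data in response.client_data:
        print(client_data.organization, client_data.start_date, client_data.end_date)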
@@ -203,10 +323,10 @@ class SetPropertyServiceLevelRequest(proto.Message): proto.STRING, number=2, ) - service_level: "AnalyticsServiceLevel" = proto.Field( + service_level: resources.AnalyticsServiceLevel = proto.Field( proto.ENUM, number=3, - enum="AnalyticsServiceLevel", + enum=resources.AnalyticsServiceLevel, ) @@ -214,4 +334,150 @@ class SetPropertyServiceLevelResponse(proto.Message): r"""Response message for SetPropertyServiceLevel RPC.""" +class ReportPropertyUsageRequest(proto.Message): + r"""Request message for ReportPropertyUsage RPC. + + Attributes: + organization (str): + Required. Specifies the organization whose property usage + will be listed. + + Format: organizations/{org_id} + month (str): + Required. The target month to list property + usages. + Format: YYYY-MM. For example, "2025-05". + """ + + organization: str = proto.Field( + proto.STRING, + number=1, + ) + month: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ReportPropertyUsageResponse(proto.Message): + r"""Response message for ReportPropertyUsage RPC. + + Attributes: + property_usages (MutableSequence[google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageResponse.PropertyUsage]): + Usage data for all properties in the + specified organization and month. + bill_info (google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageResponse.BillInfo): + Bill amount in the specified organization and + month. + Will be empty if user only has access to usage + data. + """ + + class PropertyUsage(proto.Message): + r"""Contains the count of events received by the property, along with + metadata that influences the volume of ``billable`` events. + + Attributes: + property (str): + The name of the Google Analytics Admin API property + resource. + + Format: + analyticsadmin.googleapis.com/properties/{property_id} + display_name (str): + The display name of the property. + account_id (int): + The ID of the property's parent account. + service_level (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsServiceLevel): + The service level of the property. + property_type (google.ads.marketingplatform_admin_v1alpha.types.AnalyticsPropertyType): + The subtype of the analytics property. This + affects the billable event count. + total_event_count (int): + Total event count that the property received + during the requested month. + billable_event_count (int): + The number of events for which the property + is billed in the requested month. + """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + account_id: int = proto.Field( + proto.INT64, + number=3, + ) + service_level: resources.AnalyticsServiceLevel = proto.Field( + proto.ENUM, + number=4, + enum=resources.AnalyticsServiceLevel, + ) + property_type: resources.AnalyticsPropertyType = proto.Field( + proto.ENUM, + number=5, + enum=resources.AnalyticsPropertyType, + ) + total_event_count: int = proto.Field( + proto.INT64, + number=6, + ) + billable_event_count: int = proto.Field( + proto.INT64, + number=7, + ) + + class BillInfo(proto.Message): + r"""Contains the bill amount. + + Attributes: + base_fee (google.type.money_pb2.Money): + The amount of the monthly base fee. + event_fee (google.type.money_pb2.Money): + The amount of the event fee. + price_protection_credit (google.type.money_pb2.Money): + The amount of the price protection credit, + this is only available for eligible customers. + total (google.type.money_pb2.Money): + The total amount of the bill. 
+ """ + + base_fee: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=1, + message=money_pb2.Money, + ) + event_fee: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=2, + message=money_pb2.Money, + ) + price_protection_credit: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=3, + message=money_pb2.Money, + ) + total: money_pb2.Money = proto.Field( + proto.MESSAGE, + number=4, + message=money_pb2.Money, + ) + + property_usages: MutableSequence[PropertyUsage] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=PropertyUsage, + ) + bill_info: BillInfo = proto.Field( + proto.MESSAGE, + number=2, + message=BillInfo, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py index 3273e02667e0..4dab191fa07d 100644 --- a/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py +++ b/packages/google-ads-marketingplatform-admin/google/ads/marketingplatform_admin_v1alpha/types/resources.py @@ -23,6 +23,8 @@ package="google.marketingplatform.admin.v1alpha", manifest={ "LinkVerificationState", + "AnalyticsServiceLevel", + "AnalyticsPropertyType", "Organization", "AnalyticsAccountLink", }, @@ -47,6 +49,41 @@ class LinkVerificationState(proto.Enum): LINK_VERIFICATION_STATE_NOT_VERIFIED = 2 +class AnalyticsServiceLevel(proto.Enum): + r"""Various levels of service for Google Analytics. + + Values: + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED (0): + Service level unspecified. + ANALYTICS_SERVICE_LEVEL_STANDARD (1): + The standard version of Google Analytics. + ANALYTICS_SERVICE_LEVEL_360 (2): + The premium version of Google Analytics. + """ + ANALYTICS_SERVICE_LEVEL_UNSPECIFIED = 0 + ANALYTICS_SERVICE_LEVEL_STANDARD = 1 + ANALYTICS_SERVICE_LEVEL_360 = 2 + + +class AnalyticsPropertyType(proto.Enum): + r"""Types of the Google Analytics Property. + + Values: + ANALYTICS_PROPERTY_TYPE_UNSPECIFIED (0): + Unknown or unspecified property type + ANALYTICS_PROPERTY_TYPE_ORDINARY (1): + Ordinary Google Analytics property + ANALYTICS_PROPERTY_TYPE_SUBPROPERTY (2): + Google Analytics subproperty + ANALYTICS_PROPERTY_TYPE_ROLLUP (3): + Google Analytics rollup property + """ + ANALYTICS_PROPERTY_TYPE_UNSPECIFIED = 0 + ANALYTICS_PROPERTY_TYPE_ORDINARY = 1 + ANALYTICS_PROPERTY_TYPE_SUBPROPERTY = 2 + ANALYTICS_PROPERTY_TYPE_ROLLUP = 3 + + class Organization(proto.Message): r"""A resource message representing a Google Marketing Platform organization. diff --git a/packages/google-ads-marketingplatform-admin/noxfile.py b/packages/google-ads-marketingplatform-admin/noxfile.py index 44c7f351a55e..0a8f35d7b244 100644 --- a/packages/google-ads-marketingplatform-admin/noxfile.py +++ b/packages/google-ads-marketingplatform-admin/noxfile.py @@ -27,6 +27,10 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py index 9ce0c18b807d..cd6c6687119c 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_async.py @@ -53,4 +53,5 @@ async def sample_create_analytics_account_link(): # Handle the response print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py index 6647383a9d8f..14bc721204c5 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_create_analytics_account_link_sync.py @@ -53,4 +53,5 @@ def sample_create_analytics_account_link(): # Handle the response print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_CreateAnalyticsAccountLink_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_async.py new file mode 100644 index 000000000000..e799ca7d1181 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for FindSalesPartnerManagedClients +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_FindSalesPartnerManagedClients_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_find_sales_partner_managed_clients(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.FindSalesPartnerManagedClientsRequest( + organization="organization_value", + ) + + # Make the request + response = await client.find_sales_partner_managed_clients(request=request) + + # Handle the response + print(response) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_FindSalesPartnerManagedClients_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_sync.py new file mode 100644 index 000000000000..06c74d57b9cb --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FindSalesPartnerManagedClients +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_FindSalesPartnerManagedClients_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_find_sales_partner_managed_clients(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.FindSalesPartnerManagedClientsRequest( + organization="organization_value", + ) + + # Make the request + response = client.find_sales_partner_managed_clients(request=request) + + # Handle the response + print(response) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_FindSalesPartnerManagedClients_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py index d439e74fb019..cbda174ca5c1 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_async.py @@ -49,4 +49,5 @@ async def sample_get_organization(): # Handle the response print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py index 3f5013acb546..70c86b4c8363 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_get_organization_sync.py @@ -49,4 +49,5 @@ def sample_get_organization(): # Handle the response print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_GetOrganization_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py index 6d8fa1e81cf2..1444b9fad0ee 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_async.py @@ -50,4 +50,5 @@ async def sample_list_analytics_account_links(): async for response in 
page_result: print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py index 21635e24ea46..b784557e8153 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py @@ -50,4 +50,5 @@ def sample_list_analytics_account_links(): for response in page_result: print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListAnalyticsAccountLinks_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_async.py new file mode 100644 index 000000000000..577d7eee8422 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOrganizations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListOrganizations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_list_organizations(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListOrganizationsRequest() + + # Make the request + page_result = client.list_organizations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListOrganizations_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_sync.py new file mode 100644 index 000000000000..9084b1d75c4e --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListOrganizations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListOrganizations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_list_organizations(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ListOrganizationsRequest() + + # Make the request + page_result = client.list_organizations(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListOrganizations_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_async.py new file mode 100644 index 000000000000..e19a56866e58 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReportPropertyUsage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ReportPropertyUsage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +async def sample_report_property_usage(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ReportPropertyUsageRequest( + organization="organization_value", + month="month_value", + ) + + # Make the request + response = await client.report_property_usage(request=request) + + # Handle the response + print(response) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ReportPropertyUsage_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_sync.py new file mode 100644 index 000000000000..c174bb012400 --- /dev/null +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ReportPropertyUsage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-ads-marketingplatform-admin + + +# [START marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ReportPropertyUsage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.ads import marketingplatform_admin_v1alpha + + +def sample_report_property_usage(): + # Create a client + client = marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient() + + # Initialize request argument(s) + request = marketingplatform_admin_v1alpha.ReportPropertyUsageRequest( + organization="organization_value", + month="month_value", + ) + + # Make the request + response = client.report_property_usage(request=request) + + # Handle the response + print(response) + + +# [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ReportPropertyUsage_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py index 002b705efd82..8f7cf8f7e2d5 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_async.py @@ -51,4 +51,5 @@ async def sample_set_property_service_level(): # Handle the response print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_async] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py index 44a8436b3131..4fa92ff1430b 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_set_property_service_level_sync.py @@ -51,4 +51,5 @@ def sample_set_property_service_level(): # Handle the response print(response) + # [END marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_SetPropertyServiceLevel_sync] diff --git a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json index d8e38d21950d..443b2c434143 100644 --- a/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json +++ b/packages/google-ads-marketingplatform-admin/samples/generated_samples/snippet_metadata_google.marketingplatform.admin.v1alpha.json @@ -335,6 +335,159 @@ ], "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_delete_analytics_account_link_sync.py" }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.find_sales_partner_managed_clients", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.FindSalesPartnerManagedClients", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "FindSalesPartnerManagedClients" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsResponse", + "shortName": "find_sales_partner_managed_clients" + }, + "description": "Sample for FindSalesPartnerManagedClients", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_FindSalesPartnerManagedClients_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.find_sales_partner_managed_clients", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.FindSalesPartnerManagedClients", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "FindSalesPartnerManagedClients" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.FindSalesPartnerManagedClientsResponse", + "shortName": "find_sales_partner_managed_clients" + }, + "description": "Sample for FindSalesPartnerManagedClients", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_FindSalesPartnerManagedClients_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_find_sales_partner_managed_clients_sync.py" + }, { "canonical": true, "clientMethod": { @@ -657,6 +810,320 @@ ], "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_analytics_account_links_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.list_organizations", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListOrganizations", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListOrganizations" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListOrganizationsAsyncPager", + "shortName": "list_organizations" + }, + "description": "Sample for ListOrganizations", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListOrganizations_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.list_organizations", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ListOrganizations", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ListOrganizations" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ListOrganizationsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.services.marketingplatform_admin_service.pagers.ListOrganizationsPager", + "shortName": "list_organizations" + }, + "description": "Sample for ListOrganizations", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ListOrganizations_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_list_organizations_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient", + "shortName": "MarketingplatformAdminServiceAsyncClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceAsyncClient.report_property_usage", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ReportPropertyUsage", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ReportPropertyUsage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageRequest" + }, + { + "name": "organization", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageResponse", + "shortName": "report_property_usage" + }, + "description": "Sample for ReportPropertyUsage", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ReportPropertyUsage_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient", + "shortName": "MarketingplatformAdminServiceClient" + }, + "fullName": "google.ads.marketingplatform_admin_v1alpha.MarketingplatformAdminServiceClient.report_property_usage", + "method": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService.ReportPropertyUsage", + "service": { + "fullName": "google.marketingplatform.admin.v1alpha.MarketingplatformAdminService", + "shortName": "MarketingplatformAdminService" + }, + "shortName": "ReportPropertyUsage" + }, + "parameters": [ + { + "name": "request", + "type": "google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageRequest" + }, + { + "name": "organization", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.ads.marketingplatform_admin_v1alpha.types.ReportPropertyUsageResponse", + "shortName": "report_property_usage" + }, + "description": "Sample for ReportPropertyUsage", + "file": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "marketingplatformadmin_v1alpha_generated_MarketingplatformAdminService_ReportPropertyUsage_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "marketingplatformadmin_v1alpha_generated_marketingplatform_admin_service_report_property_usage_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py index 0820dcaf1b40..274512a5196f 100644 --- a/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py +++ b/packages/google-ads-marketingplatform-admin/scripts/fixup_marketingplatform_admin_v1alpha_keywords.py @@ -41,8 +41,11 @@ class marketingplatform_adminCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'create_analytics_account_link': ('parent', 'analytics_account_link', ), 'delete_analytics_account_link': ('name', ), + 'find_sales_partner_managed_clients': ('organization', 'is_active', ), 'get_organization': ('name', ), 'list_analytics_account_links': ('parent', 'page_size', 'page_token', ), + 'list_organizations': ('page_size', 'page_token', ), + 'report_property_usage': ('organization', 'month', ), 'set_property_service_level': ('analytics_account_link', 'analytics_property', 'service_level', ), } diff --git a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py index 2fd000b5ac91..ad3d38f109ac 100644 --- 
a/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py +++ b/packages/google-ads-marketingplatform-admin/tests/unit/gapic/marketingplatform_admin_v1alpha/test_marketingplatform_admin_service.py @@ -1535,11 +1535,11 @@ async def test_get_organization_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - marketingplatform_admin.ListAnalyticsAccountLinksRequest, + marketingplatform_admin.ListOrganizationsRequest, dict, ], ) -def test_list_analytics_account_links(request_type, transport: str = "grpc"): +def test_list_organizations(request_type, transport: str = "grpc"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1551,26 +1551,26 @@ def test_list_analytics_account_links(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" + type(client.transport.list_organizations), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + call.return_value = marketingplatform_admin.ListOrganizationsResponse( next_page_token="next_page_token_value", ) - response = client.list_analytics_account_links(request) + response = client.list_organizations(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + request = marketingplatform_admin.ListOrganizationsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert isinstance(response, pagers.ListOrganizationsPager) assert response.next_page_token == "next_page_token_value" -def test_list_analytics_account_links_non_empty_request_with_auto_populated_field(): +def test_list_organizations_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = MarketingplatformAdminServiceClient( @@ -1581,28 +1581,26 @@ def test_list_analytics_account_links_non_empty_request_with_auto_populated_fiel # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( - parent="parent_value", + request = marketingplatform_admin.ListOrganizationsRequest( page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" + type(client.transport.list_organizations), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_analytics_account_links(request=request) + client.list_organizations(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest( - parent="parent_value", + assert args[0] == marketingplatform_admin.ListOrganizationsRequest( page_token="page_token_value", ) -def test_list_analytics_account_links_use_cached_wrapped_rpc(): +def test_list_organizations_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -1617,8 +1615,7 @@ def test_list_analytics_account_links_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_analytics_account_links - in client._transport._wrapped_methods + client._transport.list_organizations in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -1627,15 +1624,15 @@ def test_list_analytics_account_links_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_analytics_account_links + client._transport.list_organizations ] = mock_rpc request = {} - client.list_analytics_account_links(request) + client.list_organizations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_analytics_account_links(request) + client.list_organizations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1643,7 +1640,7 @@ def test_list_analytics_account_links_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( +async def test_list_organizations_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1660,7 +1657,7 @@ async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_analytics_account_links + client._client._transport.list_organizations in client._client._transport._wrapped_methods ) @@ -1668,16 +1665,16 @@ async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_analytics_account_links + client._client._transport.list_organizations ] = mock_rpc request = {} - await client.list_analytics_account_links(request) + await client.list_organizations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_analytics_account_links(request) + await client.list_organizations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -1685,9 +1682,9 @@ async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_analytics_account_links_async( +async def test_list_organizations_async( transport: str = "grpc_asyncio", - request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, + request_type=marketingplatform_admin.ListOrganizationsRequest, ): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -1700,184 +1697,33 @@ async def test_list_analytics_account_links_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" + type(client.transport.list_organizations), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - marketingplatform_admin.ListAnalyticsAccountLinksResponse( + marketingplatform_admin.ListOrganizationsResponse( next_page_token="next_page_token_value", ) ) - response = await client.list_analytics_account_links(request) + response = await client.list_organizations(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() + request = marketingplatform_admin.ListOrganizationsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAnalyticsAccountLinksAsyncPager) + assert isinstance(response, pagers.ListOrganizationsAsyncPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_list_analytics_account_links_async_from_dict(): - await test_list_analytics_account_links_async(request_type=dict) - - -def test_list_analytics_account_links_field_headers(): - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" - ) as call: - call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() - client.list_analytics_account_links(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_analytics_account_links_field_headers_async(): - client = MarketingplatformAdminServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - marketingplatform_admin.ListAnalyticsAccountLinksResponse() - ) - await client.list_analytics_account_links(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_analytics_account_links_flattened(): - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_analytics_account_links( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_analytics_account_links_flattened_error(): - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_analytics_account_links( - marketingplatform_admin.ListAnalyticsAccountLinksRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_analytics_account_links_flattened_async(): - client = MarketingplatformAdminServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - marketingplatform_admin.ListAnalyticsAccountLinksResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_analytics_account_links( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_analytics_account_links_flattened_error_async(): - client = MarketingplatformAdminServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_analytics_account_links( - marketingplatform_admin.ListAnalyticsAccountLinksRequest(), - parent="parent_value", - ) +async def test_list_organizations_async_from_dict(): + await test_list_organizations_async(request_type=dict) -def test_list_analytics_account_links_pager(transport_name: str = "grpc"): +def test_list_organizations_pager(transport_name: str = "grpc"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -1885,32 +1731,32 @@ def test_list_analytics_account_links_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" + type(client.transport.list_organizations), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), + resources.Organization(), ], next_page_token="abc", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[], + marketingplatform_admin.ListOrganizationsResponse( + organizations=[], next_page_token="def", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), ], next_page_token="ghi", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), ], ), RuntimeError, @@ -1919,12 +1765,7 @@ def test_list_analytics_account_links_pager(transport_name: str = "grpc"): expected_metadata = () retry = retries.Retry() timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_analytics_account_links( - request={}, retry=retry, timeout=timeout - ) + pager = client.list_organizations(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -1932,10 +1773,10 @@ def test_list_analytics_account_links_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) + assert all(isinstance(i, resources.Organization) for i in results) -def test_list_analytics_account_links_pages(transport_name: str = "grpc"): +def test_list_organizations_pages(transport_name: str = "grpc"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -1943,82 +1784,82 @@ def test_list_analytics_account_links_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_analytics_account_links), "__call__" + type(client.transport.list_organizations), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), + resources.Organization(), ], next_page_token="abc", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[], + marketingplatform_admin.ListOrganizationsResponse( + organizations=[], next_page_token="def", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), ], next_page_token="ghi", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), ], ), RuntimeError, ) - pages = list(client.list_analytics_account_links(request={}).pages) + pages = list(client.list_organizations(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_analytics_account_links_async_pager(): +async def test_list_organizations_async_pager(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_analytics_account_links), + type(client.transport.list_organizations), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), + resources.Organization(), ], next_page_token="abc", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[], + marketingplatform_admin.ListOrganizationsResponse( + organizations=[], next_page_token="def", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), ], next_page_token="ghi", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), ], ), RuntimeError, ) - async_pager = await client.list_analytics_account_links( + async_pager = await client.list_organizations( request={}, ) assert async_pager.next_page_token == "abc" @@ -2027,45 +1868,45 @@ async def test_list_analytics_account_links_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, resources.AnalyticsAccountLink) for i in responses) + assert all(isinstance(i, resources.Organization) for i in responses) @pytest.mark.asyncio -async def test_list_analytics_account_links_async_pages(): +async def test_list_organizations_async_pages(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_analytics_account_links), + type(client.transport.list_organizations), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), + resources.Organization(), ], next_page_token="abc", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[], + marketingplatform_admin.ListOrganizationsResponse( + organizations=[], next_page_token="def", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), ], next_page_token="ghi", ), - marketingplatform_admin.ListAnalyticsAccountLinksResponse( - analytics_account_links=[ - resources.AnalyticsAccountLink(), - resources.AnalyticsAccountLink(), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), ], ), RuntimeError, @@ -2074,7 +1915,7 @@ async def test_list_analytics_account_links_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_analytics_account_links(request={}) + await client.list_organizations(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -2084,11 +1925,11 @@ async def test_list_analytics_account_links_async_pages(): @pytest.mark.parametrize( "request_type", [ - marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + marketingplatform_admin.FindSalesPartnerManagedClientsRequest, dict, ], ) -def test_create_analytics_account_link(request_type, transport: str = "grpc"): +def test_find_sales_partner_managed_clients(request_type, transport: str = "grpc"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2100,35 +1941,27 @@ def test_create_analytics_account_link(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_analytics_account_link), "__call__" + type(client.transport.find_sales_partner_managed_clients), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = resources.AnalyticsAccountLink( - name="name_value", - analytics_account="analytics_account_value", - display_name="display_name_value", - link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + call.return_value = ( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse() ) - response = client.create_analytics_account_link(request) + response = client.find_sales_partner_managed_clients(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AnalyticsAccountLink) - assert response.name == "name_value" - assert response.analytics_account == "analytics_account_value" - assert response.display_name == "display_name_value" - assert ( - response.link_verification_state - == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + assert isinstance( + response, marketingplatform_admin.FindSalesPartnerManagedClientsResponse ) -def test_create_analytics_account_link_non_empty_request_with_auto_populated_field(): +def test_find_sales_partner_managed_clients_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = MarketingplatformAdminServiceClient( @@ -2139,26 +1972,26 @@ def test_create_analytics_account_link_non_empty_request_with_auto_populated_fie # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest( - parent="parent_value", + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest( + organization="organization_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_analytics_account_link), "__call__" + type(client.transport.find_sales_partner_managed_clients), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_analytics_account_link(request=request) + client.find_sales_partner_managed_clients(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest( - parent="parent_value", + assert args[0] == marketingplatform_admin.FindSalesPartnerManagedClientsRequest( + organization="organization_value", ) -def test_create_analytics_account_link_use_cached_wrapped_rpc(): +def test_find_sales_partner_managed_clients_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2173,7 +2006,7 @@ def test_create_analytics_account_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_analytics_account_link + client._transport.find_sales_partner_managed_clients in client._transport._wrapped_methods ) @@ -2183,15 +2016,15 @@ def test_create_analytics_account_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_analytics_account_link + client._transport.find_sales_partner_managed_clients ] = mock_rpc request = {} - client.create_analytics_account_link(request) + client.find_sales_partner_managed_clients(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_analytics_account_link(request) + client.find_sales_partner_managed_clients(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2199,7 +2032,7 @@ def test_create_analytics_account_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( +async def test_find_sales_partner_managed_clients_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2216,7 +2049,7 @@ async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_analytics_account_link + client._client._transport.find_sales_partner_managed_clients in client._client._transport._wrapped_methods ) @@ -2224,16 +2057,16 @@ async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_analytics_account_link + client._client._transport.find_sales_partner_managed_clients ] = mock_rpc request = {} - await client.create_analytics_account_link(request) + await client.find_sales_partner_managed_clients(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.create_analytics_account_link(request) + await client.find_sales_partner_managed_clients(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2241,9 +2074,9 @@ async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_analytics_account_link_async( +async def test_find_sales_partner_managed_clients_async( transport: str = "grpc_asyncio", - request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + request_type=marketingplatform_admin.FindSalesPartnerManagedClientsRequest, ): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -2256,58 +2089,50 @@ async def test_create_analytics_account_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_analytics_account_link), "__call__" + type(client.transport.find_sales_partner_managed_clients), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AnalyticsAccountLink( - name="name_value", - analytics_account="analytics_account_value", - display_name="display_name_value", - link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, - ) + marketingplatform_admin.FindSalesPartnerManagedClientsResponse() ) - response = await client.create_analytics_account_link(request) + response = await client.find_sales_partner_managed_clients(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.AnalyticsAccountLink) - assert response.name == "name_value" - assert response.analytics_account == "analytics_account_value" - assert response.display_name == "display_name_value" - assert ( - response.link_verification_state - == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + assert isinstance( + response, marketingplatform_admin.FindSalesPartnerManagedClientsResponse ) @pytest.mark.asyncio -async def test_create_analytics_account_link_async_from_dict(): - await test_create_analytics_account_link_async(request_type=dict) +async def test_find_sales_partner_managed_clients_async_from_dict(): + await test_find_sales_partner_managed_clients_async(request_type=dict) -def test_create_analytics_account_link_field_headers(): +def test_find_sales_partner_managed_clients_field_headers(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() - request.parent = "parent_value" + request.organization = "organization_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_analytics_account_link), "__call__" + type(client.transport.find_sales_partner_managed_clients), "__call__" ) as call: - call.return_value = resources.AnalyticsAccountLink() - client.create_analytics_account_link(request) + call.return_value = ( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse() + ) + client.find_sales_partner_managed_clients(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2318,30 +2143,30 @@ def test_create_analytics_account_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "organization=organization_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_analytics_account_link_field_headers_async(): +async def test_find_sales_partner_managed_clients_field_headers_async(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() - request.parent = "parent_value" + request.organization = "organization_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_analytics_account_link), "__call__" + type(client.transport.find_sales_partner_managed_clients), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AnalyticsAccountLink() + marketingplatform_admin.FindSalesPartnerManagedClientsResponse() ) - await client.create_analytics_account_link(request) + await client.find_sales_partner_managed_clients(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -2352,114 +2177,18 @@ async def test_create_analytics_account_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "organization=organization_value", ) in kw["metadata"] -def test_create_analytics_account_link_flattened(): - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_analytics_account_link), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.AnalyticsAccountLink() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_analytics_account_link( - parent="parent_value", - analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].analytics_account_link - mock_val = resources.AnalyticsAccountLink(name="name_value") - assert arg == mock_val - - -def test_create_analytics_account_link_flattened_error(): - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_analytics_account_link( - marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), - parent="parent_value", - analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_create_analytics_account_link_flattened_async(): - client = MarketingplatformAdminServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_analytics_account_link), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.AnalyticsAccountLink() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.AnalyticsAccountLink() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_analytics_account_link( - parent="parent_value", - analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].analytics_account_link - mock_val = resources.AnalyticsAccountLink(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_analytics_account_link_flattened_error_async(): - client = MarketingplatformAdminServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_analytics_account_link( - marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), - parent="parent_value", - analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), - ) - - @pytest.mark.parametrize( "request_type", [ - marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + marketingplatform_admin.ListAnalyticsAccountLinksRequest, dict, ], ) -def test_delete_analytics_account_link(request_type, transport: str = "grpc"): +def test_list_analytics_account_links(request_type, transport: str = "grpc"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2471,23 +2200,26 @@ def test_delete_analytics_account_link(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_analytics_account_link), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_analytics_account_link(request) + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + response = client.list_analytics_account_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListAnalyticsAccountLinksPager) + assert response.next_page_token == "next_page_token_value" -def test_delete_analytics_account_link_non_empty_request_with_auto_populated_field(): +def test_list_analytics_account_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = MarketingplatformAdminServiceClient( @@ -2498,26 +2230,28 @@ def test_delete_analytics_account_link_non_empty_request_with_auto_populated_fie # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( - name="name_value", + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_analytics_account_link), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_analytics_account_link(request=request) + client.list_analytics_account_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( - name="name_value", + assert args[0] == marketingplatform_admin.ListAnalyticsAccountLinksRequest( + parent="parent_value", + page_token="page_token_value", ) -def test_delete_analytics_account_link_use_cached_wrapped_rpc(): +def test_list_analytics_account_links_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2532,7 +2266,7 @@ def test_delete_analytics_account_link_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_analytics_account_link + client._transport.list_analytics_account_links in client._transport._wrapped_methods ) @@ -2542,15 +2276,15 @@ def test_delete_analytics_account_link_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_analytics_account_link + client._transport.list_analytics_account_links ] = mock_rpc request = {} - client.delete_analytics_account_link(request) + client.list_analytics_account_links(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_analytics_account_link(request) + client.list_analytics_account_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2558,7 +2292,7 @@ def test_delete_analytics_account_link_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( +async def test_list_analytics_account_links_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2575,7 +2309,7 @@ async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_analytics_account_link + client._client._transport.list_analytics_account_links in client._client._transport._wrapped_methods ) @@ -2583,16 +2317,16 @@ async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_analytics_account_link + client._client._transport.list_analytics_account_links ] = mock_rpc request = {} - await client.delete_analytics_account_link(request) + await client.list_analytics_account_links(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.delete_analytics_account_link(request) + await client.list_analytics_account_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2600,9 +2334,9 @@ async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_analytics_account_link_async( +async def test_list_analytics_account_links_async( transport: str = "grpc_asyncio", - request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + request_type=marketingplatform_admin.ListAnalyticsAccountLinksRequest, ): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -2615,44 +2349,49 @@ async def test_delete_analytics_account_link_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_analytics_account_link), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_analytics_account_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_analytics_account_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListAnalyticsAccountLinksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_delete_analytics_account_link_async_from_dict(): - await test_delete_analytics_account_link_async(request_type=dict) +async def test_list_analytics_account_links_async_from_dict(): + await test_list_analytics_account_links_async(request_type=dict) -def test_delete_analytics_account_link_field_headers(): +def test_list_analytics_account_links_field_headers(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_analytics_account_link), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: - call.return_value = None - client.delete_analytics_account_link(request) + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() + client.list_analytics_account_links(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -2663,28 +2402,30 @@ def test_delete_analytics_account_link_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_analytics_account_link_field_headers_async(): +async def test_list_analytics_account_links_field_headers_async(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + request = marketingplatform_admin.ListAnalyticsAccountLinksRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_analytics_account_link), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_analytics_account_link(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) + await client.list_analytics_account_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2695,37 +2436,37 @@ async def test_delete_analytics_account_link_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_analytics_account_link_flattened(): +def test_list_analytics_account_links_flattened(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_analytics_account_link), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_analytics_account_link( - name="name_value", + client.list_analytics_account_links( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_analytics_account_link_flattened_error(): +def test_list_analytics_account_links_flattened_error(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2733,43 +2474,45 @@ def test_delete_analytics_account_link_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_analytics_account_link( - marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), - name="name_value", + client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_analytics_account_link_flattened_async(): +async def test_list_analytics_account_links_flattened_async(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_analytics_account_link), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = marketingplatform_admin.ListAnalyticsAccountLinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListAnalyticsAccountLinksResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_analytics_account_link( - name="name_value", + response = await client.list_analytics_account_links( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_analytics_account_link_flattened_error_async(): +async def test_list_analytics_account_links_flattened_error_async(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -2777,95 +2520,309 @@ async def test_delete_analytics_account_link_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_analytics_account_link( - marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), - name="name_value", + await client.list_analytics_account_links( + marketingplatform_admin.ListAnalyticsAccountLinksRequest(), + parent="parent_value", ) -@pytest.mark.parametrize( - "request_type", - [ - marketingplatform_admin.SetPropertyServiceLevelRequest, - dict, - ], -) -def test_set_property_service_level(request_type, transport: str = "grpc"): +def test_list_analytics_account_links_pager(transport_name: str = "grpc"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_property_service_level), "__call__" + type(client.transport.list_analytics_account_links), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() - response = client.set_property_service_level(request) + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.SetPropertyServiceLevelRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_analytics_account_links( + request={}, retry=retry, timeout=timeout + ) - # Establish that the response is the type that we expect. - assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in results) -def test_set_property_service_level_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. + +def test_list_analytics_account_links_pages(transport_name: str = "grpc"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = marketingplatform_admin.SetPropertyServiceLevelRequest( - analytics_account_link="analytics_account_link_value", - analytics_property="analytics_property_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_analytics_account_links(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pager(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_property_service_level), "__call__" + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Set the response to a series of pages. + call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, ) - client.set_property_service_level(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest( - analytics_account_link="analytics_account_link_value", - analytics_property="analytics_property_value", + async_pager = await client.list_analytics_account_links( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, resources.AnalyticsAccountLink) for i in responses) -def test_set_property_service_level_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.asyncio +async def test_list_analytics_account_links_async_pages(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the 
actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_analytics_account_links), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[], + next_page_token="def", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListAnalyticsAccountLinksResponse( + analytics_account_links=[ + resources.AnalyticsAccountLink(), + resources.AnalyticsAccountLink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_analytics_account_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.CreateAnalyticsAccountLinkRequest, + dict, + ], +) +def test_create_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) + response = client.create_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) + + +def test_create_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.CreateAnalyticsAccountLinkRequest( + parent="parent_value", + ) + + +def test_create_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert ( - client._transport.set_property_service_level + client._transport.create_analytics_account_link in client._transport._wrapped_methods ) @@ -2875,15 +2832,15 @@ def test_set_property_service_level_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.set_property_service_level + client._transport.create_analytics_account_link ] = mock_rpc request = {} - client.set_property_service_level(request) + client.create_analytics_account_link(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.set_property_service_level(request) + client.create_analytics_account_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2891,7 +2848,7 @@ def test_set_property_service_level_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_set_property_service_level_async_use_cached_wrapped_rpc( +async def test_create_analytics_account_link_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2908,7 +2865,7 @@ async def test_set_property_service_level_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.set_property_service_level + client._client._transport.create_analytics_account_link in client._client._transport._wrapped_methods ) @@ -2916,16 +2873,16 @@ async def test_set_property_service_level_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.set_property_service_level + client._client._transport.create_analytics_account_link ] = mock_rpc request = {} - await client.set_property_service_level(request) + await client.create_analytics_account_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.set_property_service_level(request) + await client.create_analytics_account_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2933,9 +2890,9 @@ async def test_set_property_service_level_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_set_property_service_level_async( +async def test_create_analytics_account_link_async( transport: str = "grpc_asyncio", - request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, + request_type=marketingplatform_admin.CreateAnalyticsAccountLinkRequest, ): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -2948,46 +2905,58 @@ async def test_set_property_service_level_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_property_service_level), "__call__" + type(client.transport.create_analytics_account_link), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - marketingplatform_admin.SetPropertyServiceLevelResponse() + resources.AnalyticsAccountLink( + name="name_value", + analytics_account="analytics_account_value", + display_name="display_name_value", + link_verification_state=resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED, + ) ) - response = await client.set_property_service_level(request) + response = await client.create_analytics_account_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = marketingplatform_admin.SetPropertyServiceLevelRequest() + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + assert isinstance(response, resources.AnalyticsAccountLink) + assert response.name == "name_value" + assert response.analytics_account == "analytics_account_value" + assert response.display_name == "display_name_value" + assert ( + response.link_verification_state + == resources.LinkVerificationState.LINK_VERIFICATION_STATE_VERIFIED + ) @pytest.mark.asyncio -async def test_set_property_service_level_async_from_dict(): - await test_set_property_service_level_async(request_type=dict) +async def test_create_analytics_account_link_async_from_dict(): + await test_create_analytics_account_link_async(request_type=dict) -def test_set_property_service_level_field_headers(): +def test_create_analytics_account_link_field_headers(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = marketingplatform_admin.SetPropertyServiceLevelRequest() + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() - request.analytics_account_link = "analytics_account_link_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.set_property_service_level), "__call__" + type(client.transport.create_analytics_account_link), "__call__" ) as call: - call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() - client.set_property_service_level(request) + call.return_value = resources.AnalyticsAccountLink() + client.create_analytics_account_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2998,30 +2967,30 @@ def test_set_property_service_level_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "analytics_account_link=analytics_account_link_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_set_property_service_level_field_headers_async(): +async def test_create_analytics_account_link_field_headers_async(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = marketingplatform_admin.SetPropertyServiceLevelRequest() + request = marketingplatform_admin.CreateAnalyticsAccountLinkRequest() - request.analytics_account_link = "analytics_account_link_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_property_service_level), "__call__" + type(client.transport.create_analytics_account_link), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - marketingplatform_admin.SetPropertyServiceLevelResponse() + resources.AnalyticsAccountLink() ) - await client.set_property_service_level(request) + await client.create_analytics_account_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3032,37 +3001,41 @@ async def test_set_property_service_level_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "analytics_account_link=analytics_account_link_value", + "parent=parent_value", ) in kw["metadata"] -def test_set_property_service_level_flattened(): +def test_create_analytics_account_link_flattened(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_property_service_level), "__call__" + type(client.transport.create_analytics_account_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + call.return_value = resources.AnalyticsAccountLink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.set_property_service_level( - analytics_account_link="analytics_account_link_value", + client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].analytics_account_link - mock_val = "analytics_account_link_value" + mock_val = resources.AnalyticsAccountLink(name="name_value") assert arg == mock_val -def test_set_property_service_level_flattened_error(): +def test_create_analytics_account_link_flattened_error(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3070,45 +3043,721 @@ def test_set_property_service_level_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_property_service_level( - marketingplatform_admin.SetPropertyServiceLevelRequest(), - analytics_account_link="analytics_account_link_value", + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), ) @pytest.mark.asyncio -async def test_set_property_service_level_flattened_async(): +async def test_create_analytics_account_link_flattened_async(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.set_property_service_level), "__call__" + type(client.transport.create_analytics_account_link), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + call.return_value = resources.AnalyticsAccountLink() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - marketingplatform_admin.SetPropertyServiceLevelResponse() + resources.AnalyticsAccountLink() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.set_property_service_level( - analytics_account_link="analytics_account_link_value", + response = await client.create_analytics_account_link( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].analytics_account_link - mock_val = "analytics_account_link_value" + mock_val = resources.AnalyticsAccountLink(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_set_property_service_level_flattened_error_async(): +async def test_create_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, + dict, + ], +) +def test_delete_analytics_account_link(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_analytics_account_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_analytics_account_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.DeleteAnalyticsAccountLinkRequest( + name="name_value", + ) + + +def test_delete_analytics_account_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_analytics_account_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_async_from_dict(): + await test_delete_analytics_account_link_async(request_type=dict) + + +def test_delete_analytics_account_link_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = None + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.DeleteAnalyticsAccountLinkRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_analytics_account_link_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_analytics_account_link_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_analytics_account_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_analytics_account_link( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_analytics_account_link_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.SetPropertyServiceLevelRequest, + dict, + ], +) +def test_set_property_service_level(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + response = client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +def test_set_property_service_level_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.set_property_service_level(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.SetPropertyServiceLevelRequest( + analytics_account_link="analytics_account_link_value", + analytics_property="analytics_property_value", + ) + + +def test_set_property_service_level_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.set_property_service_level + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.set_property_service_level + ] = mock_rpc + request = {} + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.set_property_service_level + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_property_service_level + ] = mock_rpc + + request = {} + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.set_property_service_level(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_set_property_service_level_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + response = await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.SetPropertyServiceLevelResponse) + + +@pytest.mark.asyncio +async def test_set_property_service_level_async_from_dict(): + await test_set_property_service_level_async(request_type=dict) + + +def test_set_property_service_level_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_property_service_level_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.SetPropertyServiceLevelRequest() + + request.analytics_account_link = "analytics_account_link_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + await client.set_property_service_level(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "analytics_account_link=analytics_account_link_value", + ) in kw["metadata"] + + +def test_set_property_service_level_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +def test_set_property_service_level_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", + ) + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.set_property_service_level), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.SetPropertyServiceLevelResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_property_service_level( + analytics_account_link="analytics_account_link_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].analytics_account_link + mock_val = "analytics_account_link_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_property_service_level_flattened_error_async(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3122,6 +3771,345 @@ async def test_set_property_service_level_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ReportPropertyUsageRequest, + dict, + ], +) +def test_report_property_usage(request_type, transport: str = "grpc"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ReportPropertyUsageResponse() + response = client.report_property_usage(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ReportPropertyUsageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.ReportPropertyUsageResponse) + + +def test_report_property_usage_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = marketingplatform_admin.ReportPropertyUsageRequest( + organization="organization_value", + month="month_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.report_property_usage(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == marketingplatform_admin.ReportPropertyUsageRequest( + organization="organization_value", + month="month_value", + ) + + +def test_report_property_usage_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.report_property_usage + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.report_property_usage + ] = mock_rpc + request = {} + client.report_property_usage(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.report_property_usage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_report_property_usage_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.report_property_usage + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.report_property_usage + ] = mock_rpc + + request = {} + await client.report_property_usage(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.report_property_usage(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_report_property_usage_async( + transport: str = "grpc_asyncio", + request_type=marketingplatform_admin.ReportPropertyUsageRequest, +): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ReportPropertyUsageResponse() + ) + response = await client.report_property_usage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = marketingplatform_admin.ReportPropertyUsageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, marketingplatform_admin.ReportPropertyUsageResponse) + + +@pytest.mark.asyncio +async def test_report_property_usage_async_from_dict(): + await test_report_property_usage_async(request_type=dict) + + +def test_report_property_usage_field_headers(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ReportPropertyUsageRequest() + + request.organization = "organization_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + call.return_value = marketingplatform_admin.ReportPropertyUsageResponse() + client.report_property_usage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "organization=organization_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_report_property_usage_field_headers_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = marketingplatform_admin.ReportPropertyUsageRequest() + + request.organization = "organization_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ReportPropertyUsageResponse() + ) + await client.report_property_usage(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "organization=organization_value", + ) in kw["metadata"] + + +def test_report_property_usage_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = marketingplatform_admin.ReportPropertyUsageResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.report_property_usage( + organization="organization_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].organization + mock_val = "organization_value" + assert arg == mock_val + + +def test_report_property_usage_flattened_error(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.report_property_usage( + marketingplatform_admin.ReportPropertyUsageRequest(), + organization="organization_value", + ) + + +@pytest.mark.asyncio +async def test_report_property_usage_flattened_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = marketingplatform_admin.ReportPropertyUsageResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ReportPropertyUsageResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.report_property_usage( + organization="organization_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].organization + mock_val = "organization_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_report_property_usage_flattened_error_async(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.report_property_usage( + marketingplatform_admin.ReportPropertyUsageRequest(), + organization="organization_value", + ) + + def test_get_organization_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3166,7 +4154,292 @@ def test_get_organization_rest_required_fields( transport_class = transports.MarketingplatformAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_organization._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_organization(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_organization_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_organization._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_organization_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Organization() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_organization(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=organizations/*}" % client.transport._host, args[1] + ) + + +def test_get_organization_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_organization( + marketingplatform_admin.GetOrganizationRequest(), + name="name_value", + ) + + +def test_list_organizations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_organizations in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_organizations + ] = mock_rpc + + request = {} + client.list_organizations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_organizations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_organizations_rest_pager(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), + resources.Organization(), + ], + next_page_token="abc", + ), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[], + next_page_token="def", + ), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + ], + next_page_token="ghi", + ), + marketingplatform_admin.ListOrganizationsResponse( + organizations=[ + resources.Organization(), + resources.Organization(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + marketingplatform_admin.ListOrganizationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_organizations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Organization) for i in results) + + pages = list(client.list_organizations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_find_sales_partner_managed_clients_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with 
mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.find_sales_partner_managed_clients + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.find_sales_partner_managed_clients + ] = mock_rpc + + request = {} + client.find_sales_partner_managed_clients(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.find_sales_partner_managed_clients(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_find_sales_partner_managed_clients_rest_required_fields( + request_type=marketingplatform_admin.FindSalesPartnerManagedClientsRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["organization"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3177,21 +4450,21 @@ def test_get_organization_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_organization._get_unset_required_fields(jsonified_request) + ).find_sales_partner_managed_clients._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["organization"] = "organization_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_organization._get_unset_required_fields(jsonified_request) + ).find_sales_partner_managed_clients._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "organization" in jsonified_request + assert jsonified_request["organization"] == "organization_value" client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3200,7 +4473,7 @@ def test_get_organization_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = resources.Organization() + return_value = marketingplatform_admin.FindSalesPartnerManagedClientsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3212,92 +4485,43 @@ def test_get_organization_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Organization.pb(return_value) + return_value = ( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_organization(request) + response = client.find_sales_partner_managed_clients(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_organization_rest_unset_required_fields(): +def test_find_sales_partner_managed_clients_rest_unset_required_fields(): transport = transports.MarketingplatformAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_organization._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_organization_rest_flattened(): - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = resources.Organization() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "organizations/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.Organization.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_organization(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=organizations/*}" % client.transport._host, args[1] - ) - - -def test_get_organization_rest_flattened_error(transport: str = "rest"): - client = MarketingplatformAdminServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + unset_fields = ( + transport.find_sales_partner_managed_clients._get_unset_required_fields({}) ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_organization( - marketingplatform_admin.GetOrganizationRequest(), - name="name_value", - ) + assert set(unset_fields) == (set(()) & set(("organization",))) def test_list_analytics_account_links_rest_use_cached_wrapped_rpc(): @@ -3702,7 +4926,191 @@ def test_create_analytics_account_link_rest_unset_required_fields(): ) -def test_create_analytics_account_link_rest_flattened(): +def test_create_analytics_account_link_rest_flattened(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.AnalyticsAccountLink() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.AnalyticsAccountLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_analytics_account_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + % client.transport._host, + args[1], + ) + + +def test_create_analytics_account_link_rest_flattened_error(transport: str = "rest"): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_analytics_account_link( + marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), + parent="parent_value", + analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + ) + + +def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_analytics_account_link + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_analytics_account_link + ] = mock_rpc + + request = {} + client.delete_analytics_account_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_analytics_account_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_analytics_account_link_rest_required_fields( + request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +): + transport_class = transports.MarketingplatformAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_analytics_account_link(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_analytics_account_link_rest_unset_required_fields(): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_analytics_account_link._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_analytics_account_link_rest_flattened(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3711,42 +5119,39 @@ def test_create_analytics_account_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = resources.AnalyticsAccountLink() + return_value = None # get arguments that satisfy an http rule for this method - sample_request = {"parent": "organizations/sample1"} + sample_request = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = resources.AnalyticsAccountLink.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_analytics_account_link(**mock_args) + client.delete_analytics_account_link(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{parent=organizations/*}/analyticsAccountLinks" + "%s/v1alpha/{name=organizations/*/analyticsAccountLinks/*}" % client.transport._host, args[1], ) -def test_create_analytics_account_link_rest_flattened_error(transport: str = "rest"): +def test_delete_analytics_account_link_rest_flattened_error(transport: str = "rest"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3755,14 +5160,13 @@ def test_create_analytics_account_link_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_analytics_account_link( - marketingplatform_admin.CreateAnalyticsAccountLinkRequest(), - parent="parent_value", - analytics_account_link=resources.AnalyticsAccountLink(name="name_value"), + client.delete_analytics_account_link( + marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), + name="name_value", ) -def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): +def test_set_property_service_level_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3777,7 +5181,7 @@ def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_analytics_account_link + client._transport.set_property_service_level in client._transport._wrapped_methods ) @@ -3787,29 +5191,30 @@ def test_delete_analytics_account_link_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_analytics_account_link + client._transport.set_property_service_level ] = mock_rpc request = {} - client.delete_analytics_account_link(request) + client.set_property_service_level(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_analytics_account_link(request) + client.set_property_service_level(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_analytics_account_link_rest_required_fields( - request_type=marketingplatform_admin.DeleteAnalyticsAccountLinkRequest, +def test_set_property_service_level_rest_required_fields( + request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, ): transport_class = transports.MarketingplatformAdminServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["analytics_account_link"] = "" + request_init["analytics_property"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3820,21 +5225,24 @@ def test_delete_analytics_account_link_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + ).set_property_service_level._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["analyticsAccountLink"] = "analytics_account_link_value" + jsonified_request["analyticsProperty"] = "analytics_property_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_analytics_account_link._get_unset_required_fields(jsonified_request) + ).set_property_service_level._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "analyticsAccountLink" in jsonified_request + assert jsonified_request["analyticsAccountLink"] == "analytics_account_link_value" + assert "analyticsProperty" in jsonified_request + assert 
jsonified_request["analyticsProperty"] == "analytics_property_value" client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3843,7 +5251,7 @@ def test_delete_analytics_account_link_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3855,38 +5263,51 @@ def test_delete_analytics_account_link_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_analytics_account_link(request) + response = client.set_property_service_level(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_analytics_account_link_rest_unset_required_fields(): +def test_set_property_service_level_rest_unset_required_fields(): transport = transports.MarketingplatformAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_analytics_account_link._get_unset_required_fields( - {} + unset_fields = transport.set_property_service_level._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "analyticsAccountLink", + "analyticsProperty", + "serviceLevel", + ) + ) ) - assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_analytics_account_link_rest_flattened(): +def test_set_property_service_level_rest_flattened(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3895,39 +5316,45 @@ def test_delete_analytics_account_link_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "organizations/sample1/analyticsAccountLinks/sample2"} + sample_request = { + "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + analytics_account_link="analytics_account_link_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_analytics_account_link(**mock_args) + client.set_property_service_level(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=organizations/*/analyticsAccountLinks/*}" + "%s/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel" % client.transport._host, args[1], ) -def test_delete_analytics_account_link_rest_flattened_error(transport: str = "rest"): +def test_set_property_service_level_rest_flattened_error(transport: str = "rest"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3936,13 +5363,13 @@ def test_delete_analytics_account_link_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_analytics_account_link( - marketingplatform_admin.DeleteAnalyticsAccountLinkRequest(), - name="name_value", + client.set_property_service_level( + marketingplatform_admin.SetPropertyServiceLevelRequest(), + analytics_account_link="analytics_account_link_value", ) -def test_set_property_service_level_rest_use_cached_wrapped_rpc(): +def test_report_property_usage_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3957,7 +5384,7 @@ def test_set_property_service_level_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.set_property_service_level + client._transport.report_property_usage in client._transport._wrapped_methods ) @@ -3967,30 +5394,30 @@ def test_set_property_service_level_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.set_property_service_level + client._transport.report_property_usage ] = mock_rpc request = {} - client.set_property_service_level(request) + client.report_property_usage(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.set_property_service_level(request) + client.report_property_usage(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_set_property_service_level_rest_required_fields( - request_type=marketingplatform_admin.SetPropertyServiceLevelRequest, +def test_report_property_usage_rest_required_fields( + request_type=marketingplatform_admin.ReportPropertyUsageRequest, ): transport_class = transports.MarketingplatformAdminServiceRestTransport request_init = {} - request_init["analytics_account_link"] = "" - request_init["analytics_property"] = "" + request_init["organization"] = "" + request_init["month"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4001,24 +5428,24 @@ def test_set_property_service_level_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_property_service_level._get_unset_required_fields(jsonified_request) + ).report_property_usage._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["analyticsAccountLink"] = "analytics_account_link_value" - jsonified_request["analyticsProperty"] = "analytics_property_value" + jsonified_request["organization"] = "organization_value" + jsonified_request["month"] = "month_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).set_property_service_level._get_unset_required_fields(jsonified_request) + ).report_property_usage._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "analyticsAccountLink" in jsonified_request - assert jsonified_request["analyticsAccountLink"] == "analytics_account_link_value" - assert "analyticsProperty" in jsonified_request - assert jsonified_request["analyticsProperty"] == "analytics_property_value" + assert "organization" in jsonified_request + assert jsonified_request["organization"] == "organization_value" + assert "month" in jsonified_request + assert jsonified_request["month"] == "month_value" client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4027,7 +5454,7 @@ def test_set_property_service_level_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + return_value = marketingplatform_admin.ReportPropertyUsageResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4049,7 +5476,7 @@ def test_set_property_service_level_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value = marketingplatform_admin.ReportPropertyUsageResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -4058,32 +5485,31 @@ def test_set_property_service_level_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.set_property_service_level(request) + response = client.report_property_usage(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_set_property_service_level_rest_unset_required_fields(): +def test_report_property_usage_rest_unset_required_fields(): transport = transports.MarketingplatformAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.set_property_service_level._get_unset_required_fields({}) + unset_fields = transport.report_property_usage._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( - "analyticsAccountLink", - "analyticsProperty", - "serviceLevel", + "organization", + "month", ) ) ) -def test_set_property_service_level_rest_flattened(): +def test_report_property_usage_rest_flattened(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4092,16 +5518,14 @@ def test_set_property_service_level_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = marketingplatform_admin.SetPropertyServiceLevelResponse() + return_value = marketingplatform_admin.ReportPropertyUsageResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "analytics_account_link": "organizations/sample1/analyticsAccountLinks/sample2" - } + sample_request = {"organization": "organizations/sample1"} # get truthy value for each flattened field mock_args = dict( - analytics_account_link="analytics_account_link_value", + organization="organization_value", ) mock_args.update(sample_request) @@ -4109,7 +5533,7 @@ def test_set_property_service_level_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = marketingplatform_admin.SetPropertyServiceLevelResponse.pb( + return_value = marketingplatform_admin.ReportPropertyUsageResponse.pb( return_value ) json_return_value = json_format.MessageToJson(return_value) @@ -4117,20 +5541,20 @@ def test_set_property_service_level_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.set_property_service_level(**mock_args) + client.report_property_usage(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{analytics_account_link=organizations/*/analyticsAccountLinks/*}:setPropertyServiceLevel" + "%s/v1alpha/{organization=organizations/*}:reportPropertyUsage" % client.transport._host, args[1], ) -def test_set_property_service_level_rest_flattened_error(transport: str = "rest"): +def test_report_property_usage_rest_flattened_error(transport: str = "rest"): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4139,9 +5563,9 @@ def test_set_property_service_level_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.set_property_service_level( - marketingplatform_admin.SetPropertyServiceLevelRequest(), - analytics_account_link="analytics_account_link_value", + client.report_property_usage( + marketingplatform_admin.ReportPropertyUsageRequest(), + organization="organization_value", ) @@ -4272,6 +5696,54 @@ def test_get_organization_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_organizations_empty_call_grpc(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_organizations), "__call__" + ) as call: + call.return_value = marketingplatform_admin.ListOrganizationsResponse() + client.list_organizations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.ListOrganizationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_find_sales_partner_managed_clients_empty_call_grpc(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.find_sales_partner_managed_clients), "__call__" + ) as call: + call.return_value = ( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse() + ) + client.find_sales_partner_managed_clients(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_analytics_account_links_empty_call_grpc(): @@ -4364,6 +5836,29 @@ def test_set_property_service_level_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_report_property_usage_empty_call_grpc(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + call.return_value = marketingplatform_admin.ReportPropertyUsageResponse() + client.report_property_usage(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.ReportPropertyUsageRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = MarketingplatformAdminServiceAsyncClient.get_transport_class( "grpc_asyncio" @@ -4381,27 +5876,83 @@ def test_initialize_client_w_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_get_organization_empty_call_grpc_asyncio(): +async def test_get_organization_empty_call_grpc_asyncio(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Organization( + name="name_value", + display_name="display_name_value", + ) + ) + await client.get_organization(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.GetOrganizationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_organizations_empty_call_grpc_asyncio(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_organizations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ListOrganizationsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_organizations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.ListOrganizationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_find_sales_partner_managed_clients_empty_call_grpc_asyncio(): client = MarketingplatformAdminServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_organization), "__call__") as call: + with mock.patch.object( + type(client.transport.find_sales_partner_managed_clients), "__call__" + ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Organization( - name="name_value", - display_name="display_name_value", - ) + marketingplatform_admin.FindSalesPartnerManagedClientsResponse() ) - await client.get_organization(request=None) + await client.find_sales_partner_managed_clients(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = marketingplatform_admin.GetOrganizationRequest() + request_msg = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() assert args[0] == request_msg @@ -4509,31 +6060,325 @@ async def test_set_property_service_level_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( marketingplatform_admin.SetPropertyServiceLevelResponse() ) - await client.set_property_service_level(request=None) + await client.set_property_service_level(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.SetPropertyServiceLevelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_report_property_usage_empty_call_grpc_asyncio(): + client = MarketingplatformAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + marketingplatform_admin.ReportPropertyUsageResponse() + ) + await client.report_property_usage(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.ReportPropertyUsageRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = MarketingplatformAdminServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_organization_rest_bad_request( + request_type=marketingplatform_admin.GetOrganizationRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_organization(request) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.GetOrganizationRequest, + dict, + ], +) +def test_get_organization_rest_call_success(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.Organization( + name="name_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.Organization.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_organization(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Organization) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_organization_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, "post_get_organization" + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_get_organization_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, "pre_get_organization" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = marketingplatform_admin.GetOrganizationRequest.pb( + marketingplatform_admin.GetOrganizationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resources.Organization.to_json(resources.Organization()) + req.return_value.content = return_value + + request = marketingplatform_admin.GetOrganizationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.Organization() + post_with_metadata.return_value = resources.Organization(), metadata + + client.get_organization( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_organizations_rest_bad_request( + request_type=marketingplatform_admin.ListOrganizationsRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_organizations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ListOrganizationsRequest, + dict, + ], +) +def test_list_organizations_rest_call_success(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ListOrganizationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.ListOrganizationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_organizations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOrganizationsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_organizations_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_list_organizations", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_list_organizations_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_list_organizations", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = marketingplatform_admin.ListOrganizationsRequest.pb( + marketingplatform_admin.ListOrganizationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = marketingplatform_admin.SetPropertyServiceLevelRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = marketingplatform_admin.ListOrganizationsResponse.to_json( + marketingplatform_admin.ListOrganizationsResponse() + ) + req.return_value.content = return_value - assert args[0] == request_msg + request = marketingplatform_admin.ListOrganizationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.ListOrganizationsResponse() + post_with_metadata.return_value = ( + marketingplatform_admin.ListOrganizationsResponse(), + metadata, + ) + client.list_organizations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = MarketingplatformAdminServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_get_organization_rest_bad_request( - request_type=marketingplatform_admin.GetOrganizationRequest, +def test_find_sales_partner_managed_clients_rest_bad_request( + request_type=marketingplatform_admin.FindSalesPartnerManagedClientsRequest, ): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "organizations/sample1"} + request_init = {"organization": "organizations/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4548,53 +6393,54 @@ def test_get_organization_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_organization(request) + client.find_sales_partner_managed_clients(request) @pytest.mark.parametrize( "request_type", [ - marketingplatform_admin.GetOrganizationRequest, + marketingplatform_admin.FindSalesPartnerManagedClientsRequest, dict, ], ) -def test_get_organization_rest_call_success(request_type): +def test_find_sales_partner_managed_clients_rest_call_success(request_type): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "organizations/sample1"} + request_init = {"organization": "organizations/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = resources.Organization( - name="name_value", - display_name="display_name_value", - ) + return_value = marketingplatform_admin.FindSalesPartnerManagedClientsResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = resources.Organization.pb(return_value) + return_value = ( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_organization(request) + response = client.find_sales_partner_managed_clients(request) # Establish that the response is the type that we expect. - assert isinstance(response, resources.Organization) - assert response.name == "name_value" - assert response.display_name == "display_name_value" + assert isinstance( + response, marketingplatform_admin.FindSalesPartnerManagedClientsResponse + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_organization_rest_interceptors(null_interceptor): +def test_find_sales_partner_managed_clients_rest_interceptors(null_interceptor): transport = transports.MarketingplatformAdminServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4608,18 +6454,20 @@ def test_get_organization_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.MarketingplatformAdminServiceRestInterceptor, "post_get_organization" + transports.MarketingplatformAdminServiceRestInterceptor, + "post_find_sales_partner_managed_clients", ) as post, mock.patch.object( transports.MarketingplatformAdminServiceRestInterceptor, - "post_get_organization_with_metadata", + "post_find_sales_partner_managed_clients_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.MarketingplatformAdminServiceRestInterceptor, "pre_get_organization" + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_find_sales_partner_managed_clients", ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = marketingplatform_admin.GetOrganizationRequest.pb( - marketingplatform_admin.GetOrganizationRequest() + pb_message = marketingplatform_admin.FindSalesPartnerManagedClientsRequest.pb( + marketingplatform_admin.FindSalesPartnerManagedClientsRequest() ) transcode.return_value = { "method": "post", @@ -4631,19 +6479,28 @@ def test_get_organization_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.Organization.to_json(resources.Organization()) + return_value = ( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse.to_json( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse() + ) + ) req.return_value.content = return_value - request = marketingplatform_admin.GetOrganizationRequest() + request = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.Organization() - post_with_metadata.return_value = resources.Organization(), metadata + post.return_value = ( 
+ marketingplatform_admin.FindSalesPartnerManagedClientsResponse() + ) + post_with_metadata.return_value = ( + marketingplatform_admin.FindSalesPartnerManagedClientsResponse(), + metadata, + ) - client.get_organization( + client.find_sales_partner_managed_clients( request, metadata=[ ("key", "val"), @@ -5261,6 +7118,140 @@ def test_set_property_service_level_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_report_property_usage_rest_bad_request( + request_type=marketingplatform_admin.ReportPropertyUsageRequest, +): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"organization": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.report_property_usage(request) + + +@pytest.mark.parametrize( + "request_type", + [ + marketingplatform_admin.ReportPropertyUsageRequest, + dict, + ], +) +def test_report_property_usage_rest_call_success(request_type): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"organization": "organizations/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = marketingplatform_admin.ReportPropertyUsageResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = marketingplatform_admin.ReportPropertyUsageResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.report_property_usage(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, marketingplatform_admin.ReportPropertyUsageResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_report_property_usage_rest_interceptors(null_interceptor): + transport = transports.MarketingplatformAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MarketingplatformAdminServiceRestInterceptor(), + ) + client = MarketingplatformAdminServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_report_property_usage", + ) as post, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "post_report_property_usage_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MarketingplatformAdminServiceRestInterceptor, + "pre_report_property_usage", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = marketingplatform_admin.ReportPropertyUsageRequest.pb( + marketingplatform_admin.ReportPropertyUsageRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = marketingplatform_admin.ReportPropertyUsageResponse.to_json( + marketingplatform_admin.ReportPropertyUsageResponse() + ) + req.return_value.content = return_value + + request = marketingplatform_admin.ReportPropertyUsageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = marketingplatform_admin.ReportPropertyUsageResponse() + post_with_metadata.return_value = ( + marketingplatform_admin.ReportPropertyUsageResponse(), + metadata, + ) + + client.report_property_usage( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = MarketingplatformAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -5288,6 +7279,50 @@ def test_get_organization_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_organizations_empty_call_rest(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_organizations), "__call__" + ) as call: + client.list_organizations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.ListOrganizationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_find_sales_partner_managed_clients_empty_call_rest(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.find_sales_partner_managed_clients), "__call__" + ) as call: + client.find_sales_partner_managed_clients(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.FindSalesPartnerManagedClientsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_analytics_account_links_empty_call_rest(): @@ -5376,6 +7411,28 @@ def test_set_property_service_level_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_report_property_usage_empty_call_rest(): + client = MarketingplatformAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report_property_usage), "__call__" + ) as call: + client.report_property_usage(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = marketingplatform_admin.ReportPropertyUsageRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = MarketingplatformAdminServiceClient( @@ -5410,10 +7467,13 @@ def test_marketingplatform_admin_service_base_transport(): # raise NotImplementedError. 
methods = ( "get_organization", + "list_organizations", + "find_sales_partner_managed_clients", "list_analytics_account_links", "create_analytics_account_link", "delete_analytics_account_link", "set_property_service_level", + "report_property_usage", ) for method in methods: with pytest.raises(NotImplementedError): @@ -5698,6 +7758,12 @@ def test_marketingplatform_admin_service_client_transport_session_collision( session1 = client1.transport.get_organization._session session2 = client2.transport.get_organization._session assert session1 != session2 + session1 = client1.transport.list_organizations._session + session2 = client2.transport.list_organizations._session + assert session1 != session2 + session1 = client1.transport.find_sales_partner_managed_clients._session + session2 = client2.transport.find_sales_partner_managed_clients._session + assert session1 != session2 session1 = client1.transport.list_analytics_account_links._session session2 = client2.transport.list_analytics_account_links._session assert session1 != session2 @@ -5710,6 +7776,9 @@ def test_marketingplatform_admin_service_client_transport_session_collision( session1 = client1.transport.set_property_service_level._session session2 = client2.transport.set_property_service_level._session assert session1 != session2 + session1 = client1.transport.report_property_usage._session + session2 = client2.transport.report_property_usage._session + assert session1 != session2 def test_marketingplatform_admin_service_grpc_transport_channel(): diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index 6d637114bc16..2daf0985f7e2 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -71,6 +71,7 @@ BackupApplianceLockInfo, BackupConfigInfo, BackupConfigState, + BackupGcpResource, BackupLock, BackupVault, BackupVaultView, @@ -81,6 +82,8 @@ DataSourceGcpResource, DeleteBackupRequest, DeleteBackupVaultRequest, + FetchBackupsForResourceTypeRequest, + FetchBackupsForResourceTypeResponse, FetchUsableBackupVaultsRequest, FetchUsableBackupVaultsResponse, GcpBackupConfig, @@ -152,6 +155,8 @@ FetchDataSourceReferencesForResourceTypeRequest, FetchDataSourceReferencesForResourceTypeResponse, GetDataSourceReferenceRequest, + ListDataSourceReferencesRequest, + ListDataSourceReferencesResponse, ) __all__ = ( @@ -200,6 +205,7 @@ "BackupApplianceBackupConfig", "BackupApplianceLockInfo", "BackupConfigInfo", + "BackupGcpResource", "BackupLock", "BackupVault", "CreateBackupVaultRequest", @@ -208,6 +214,8 @@ "DataSourceGcpResource", "DeleteBackupRequest", "DeleteBackupVaultRequest", + "FetchBackupsForResourceTypeRequest", + "FetchBackupsForResourceTypeResponse", "FetchUsableBackupVaultsRequest", "FetchUsableBackupVaultsResponse", "GcpBackupConfig", @@ -272,4 +280,6 @@ "FetchDataSourceReferencesForResourceTypeRequest", "FetchDataSourceReferencesForResourceTypeResponse", "GetDataSourceReferenceRequest", + "ListDataSourceReferencesRequest", + "ListDataSourceReferencesResponse", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index ecbf09aa3b54..dce933a4a018 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -70,6 +70,7 @@ BackupApplianceLockInfo, BackupConfigInfo, 
BackupConfigState, + BackupGcpResource, BackupLock, BackupVault, BackupVaultView, @@ -80,6 +81,8 @@ DataSourceGcpResource, DeleteBackupRequest, DeleteBackupVaultRequest, + FetchBackupsForResourceTypeRequest, + FetchBackupsForResourceTypeResponse, FetchUsableBackupVaultsRequest, FetchUsableBackupVaultsResponse, GcpBackupConfig, @@ -149,6 +152,8 @@ FetchDataSourceReferencesForResourceTypeRequest, FetchDataSourceReferencesForResourceTypeResponse, GetDataSourceReferenceRequest, + ListDataSourceReferencesRequest, + ListDataSourceReferencesResponse, ) __all__ = ( @@ -166,6 +171,7 @@ "BackupConfigInfo", "BackupConfigState", "BackupDRClient", + "BackupGcpResource", "BackupLock", "BackupPlan", "BackupPlanAssociation", @@ -209,6 +215,8 @@ "Entry", "FetchBackupPlanAssociationsForResourceTypeRequest", "FetchBackupPlanAssociationsForResourceTypeResponse", + "FetchBackupsForResourceTypeRequest", + "FetchBackupsForResourceTypeResponse", "FetchDataSourceReferencesForResourceTypeRequest", "FetchDataSourceReferencesForResourceTypeResponse", "FetchUsableBackupVaultsRequest", @@ -238,6 +246,8 @@ "ListBackupVaultsResponse", "ListBackupsRequest", "ListBackupsResponse", + "ListDataSourceReferencesRequest", + "ListDataSourceReferencesResponse", "ListDataSourcesRequest", "ListDataSourcesResponse", "ListManagementServersRequest", diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 3f336db90724..7e740c7d1390 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -60,6 +60,11 @@ "fetch_backup_plan_associations_for_resource_type" ] }, + "FetchBackupsForResourceType": { + "methods": [ + "fetch_backups_for_resource_type" + ] + }, "FetchDataSourceReferencesForResourceType": { "methods": [ "fetch_data_source_references_for_resource_type" @@ -140,6 +145,11 @@ "list_backups" ] }, + "ListDataSourceReferences": { + "methods": [ + "list_data_source_references" + ] + }, "ListDataSources": { "methods": [ "list_data_sources" @@ -240,6 +250,11 @@ "fetch_backup_plan_associations_for_resource_type" ] }, + "FetchBackupsForResourceType": { + "methods": [ + "fetch_backups_for_resource_type" + ] + }, "FetchDataSourceReferencesForResourceType": { "methods": [ "fetch_data_source_references_for_resource_type" @@ -320,6 +335,11 @@ "list_backups" ] }, + "ListDataSourceReferences": { + "methods": [ + "list_data_source_references" + ] + }, "ListDataSources": { "methods": [ "list_data_sources" @@ -420,6 +440,11 @@ "fetch_backup_plan_associations_for_resource_type" ] }, + "FetchBackupsForResourceType": { + "methods": [ + "fetch_backups_for_resource_type" + ] + }, "FetchDataSourceReferencesForResourceType": { "methods": [ "fetch_data_source_references_for_resource_type" @@ -500,6 +525,11 @@ "list_backups" ] }, + "ListDataSourceReferences": { + "methods": [ + "list_data_source_references" + ] + }, "ListDataSources": { "methods": [ "list_data_sources" diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 97afd8d48bed..bee2acb8fb15 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -2159,6 +2159,148 @@ async def 
sample_list_backups(): # Done; return the response. return response + async def fetch_backups_for_resource_type( + self, + request: Optional[ + Union[backupvault.FetchBackupsForResourceTypeRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + resource_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.FetchBackupsForResourceTypeAsyncPager: + r"""Fetch Backups for a given resource type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_fetch_backups_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchBackupsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_backups_for_resource_type(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeRequest, dict]]): + The request object. Request for the + FetchBackupsForResourceType method. + parent (:class:`str`): + Required. Datasources are the parent + resource for the backups. Format: + + projects/{project}/locations/{location}/backupVaults/{backupVaultId}/dataSources/{datasourceId} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_type (:class:`str`): + Required. The type of the GCP + resource. Ex: + sqladmin.googleapis.com/Instance + + This corresponds to the ``resource_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupsForResourceTypeAsyncPager: + Response for the + FetchBackupsForResourceType method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, resource_type] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchBackupsForResourceTypeRequest): + request = backupvault.FetchBackupsForResourceTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if resource_type is not None: + request.resource_type = resource_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_backups_for_resource_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchBackupsForResourceTypeAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def get_backup( self, request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, @@ -4717,6 +4859,136 @@ async def sample_get_data_source_reference(): # Done; return the response. return response + async def list_data_source_references( + self, + request: Optional[ + Union[datasourcereference.ListDataSourceReferencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataSourceReferencesAsyncPager: + r"""Lists DataSourceReferences for a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_data_source_references(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourceReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_source_references(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListDataSourceReferencesRequest, dict]]): + The request object. Request for the + ListDataSourceReferences method. + parent (:class:`str`): + Required. The parent resource name. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourceReferencesAsyncPager: + Response for the + ListDataSourceReferences method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasourcereference.ListDataSourceReferencesRequest): + request = datasourcereference.ListDataSourceReferencesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_source_references + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataSourceReferencesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def fetch_data_source_references_for_resource_type( self, request: Optional[ diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index 427d7dbce7c8..20f3e747a952 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -2722,6 +2722,147 @@ def sample_list_backups(): # Done; return the response. return response + def fetch_backups_for_resource_type( + self, + request: Optional[ + Union[backupvault.FetchBackupsForResourceTypeRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + resource_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.FetchBackupsForResourceTypePager: + r"""Fetch Backups for a given resource type. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_fetch_backups_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchBackupsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_backups_for_resource_type(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeRequest, dict]): + The request object. Request for the + FetchBackupsForResourceType method. + parent (str): + Required. Datasources are the parent + resource for the backups. Format: + + projects/{project}/locations/{location}/backupVaults/{backupVaultId}/dataSources/{datasourceId} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_type (str): + Required. The type of the GCP + resource. Ex: + sqladmin.googleapis.com/Instance + + This corresponds to the ``resource_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupsForResourceTypePager: + Response for the + FetchBackupsForResourceType method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, resource_type] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupvault.FetchBackupsForResourceTypeRequest): + request = backupvault.FetchBackupsForResourceTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if resource_type is not None: + request.resource_type = resource_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.fetch_backups_for_resource_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchBackupsForResourceTypePager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def get_backup( self, request: Optional[Union[backupvault.GetBackupRequest, dict]] = None, @@ -5239,6 +5380,135 @@ def sample_get_data_source_reference(): # Done; return the response. return response + def list_data_source_references( + self, + request: Optional[ + Union[datasourcereference.ListDataSourceReferencesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataSourceReferencesPager: + r"""Lists DataSourceReferences for a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_data_source_references(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourceReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_source_references(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListDataSourceReferencesRequest, dict]): + The request object. Request for the + ListDataSourceReferences method. + parent (str): + Required. The parent resource name. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourceReferencesPager: + Response for the + ListDataSourceReferences method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasourcereference.ListDataSourceReferencesRequest): + request = datasourcereference.ListDataSourceReferencesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_data_source_references + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataSourceReferencesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def fetch_data_source_references_for_resource_type( self, request: Optional[ diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py index 0249a8066846..47ed1bcd23c2 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py @@ -827,6 +827,166 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class FetchBackupsForResourceTypePager: + """A pager for iterating through ``fetch_backups_for_resource_type`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchBackupsForResourceType`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., backupvault.FetchBackupsForResourceTypeResponse], + request: backupvault.FetchBackupsForResourceTypeRequest, + response: backupvault.FetchBackupsForResourceTypeResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = backupvault.FetchBackupsForResourceTypeRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupvault.FetchBackupsForResourceTypeResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupvault.Backup]: + for page in self.pages: + yield from page.backups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchBackupsForResourceTypeAsyncPager: + """A pager for iterating through ``fetch_backups_for_resource_type`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchBackupsForResourceType`` requests and continue to iterate + through the ``backups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[backupvault.FetchBackupsForResourceTypeResponse] + ], + request: backupvault.FetchBackupsForResourceTypeRequest, + response: backupvault.FetchBackupsForResourceTypeResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeRequest): + The initial request object. 
+ response (google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = backupvault.FetchBackupsForResourceTypeRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[backupvault.FetchBackupsForResourceTypeResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupvault.Backup]: + async def async_generator(): + async for page in self.pages: + for response in page.backups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListBackupPlansPager: """A pager for iterating through ``list_backup_plans`` requests. @@ -1481,6 +1641,166 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListDataSourceReferencesPager: + """A pager for iterating through ``list_data_source_references`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourceReferencesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_source_references`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataSourceReferences`` requests and continue to iterate + through the ``data_source_references`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourceReferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., datasourcereference.ListDataSourceReferencesResponse], + request: datasourcereference.ListDataSourceReferencesRequest, + response: datasourcereference.ListDataSourceReferencesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourceReferencesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourceReferencesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = datasourcereference.ListDataSourceReferencesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[datasourcereference.ListDataSourceReferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[datasourcereference.DataSourceReference]: + for page in self.pages: + yield from page.data_source_references + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataSourceReferencesAsyncPager: + """A pager for iterating through ``list_data_source_references`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListDataSourceReferencesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_source_references`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataSourceReferences`` requests and continue to iterate + through the ``data_source_references`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListDataSourceReferencesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[datasourcereference.ListDataSourceReferencesResponse] + ], + request: datasourcereference.ListDataSourceReferencesRequest, + response: datasourcereference.ListDataSourceReferencesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListDataSourceReferencesRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListDataSourceReferencesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
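# --- Editor's illustrative sketch, not part of the generated diff above. ---
# The pager classes above also expose a `pages` property for page-level
# iteration, which is useful when page metadata such as next_page_token
# matters. Resource names below are placeholders.
from google.cloud import backupdr_v1


def sample_iterate_pages():
    client = backupdr_v1.BackupDRClient()
    pager = client.list_data_source_references(
        parent="projects/my-project/locations/us-central1"
    )

    # Each page is a full ListDataSourceReferencesResponse.
    for page in pager.pages:
        print(
            f"page with {len(page.data_source_references)} references, "
            f"next_page_token={page.next_page_token!r}"
        )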
+ """ + self._method = method + self._request = datasourcereference.ListDataSourceReferencesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[datasourcereference.ListDataSourceReferencesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[datasourcereference.DataSourceReference]: + async def async_generator(): + async for page in self.pages: + for response in page.data_source_references: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class FetchDataSourceReferencesForResourceTypePager: """A pager for iterating through ``fetch_data_source_references_for_resource_type`` requests. diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py index 6d767c9dc791..35271660c5aa 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py @@ -303,6 +303,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.fetch_backups_for_resource_type: gapic_v1.method.wrap_method( + self.fetch_backups_for_resource_type, + default_timeout=None, + client_info=client_info, + ), self.get_backup: gapic_v1.method.wrap_method( self.get_backup, default_retry=retries.Retry( @@ -407,6 +412,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_data_source_references: gapic_v1.method.wrap_method( + self.list_data_source_references, + default_timeout=None, + client_info=client_info, + ), self.fetch_data_source_references_for_resource_type: gapic_v1.method.wrap_method( self.fetch_data_source_references_for_resource_type, default_timeout=None, @@ -627,6 +637,18 @@ def list_backups( ]: raise NotImplementedError() + @property + def fetch_backups_for_resource_type( + self, + ) -> Callable[ + [backupvault.FetchBackupsForResourceTypeRequest], + Union[ + backupvault.FetchBackupsForResourceTypeResponse, + Awaitable[backupvault.FetchBackupsForResourceTypeResponse], + ], + ]: + raise NotImplementedError() + @property def get_backup( self, @@ -818,6 +840,18 @@ def get_data_source_reference( ]: raise NotImplementedError() + @property + def list_data_source_references( + self, + ) -> Callable[ + [datasourcereference.ListDataSourceReferencesRequest], + Union[ + datasourcereference.ListDataSourceReferencesResponse, + Awaitable[datasourcereference.ListDataSourceReferencesResponse], + ], + ]: + raise NotImplementedError() + @property def fetch_data_source_references_for_resource_type( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py index f508d3abc79f..ae742db7fe47 100644 --- 
a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py @@ -728,6 +728,38 @@ def list_backups( ) return self._stubs["list_backups"] + @property + def fetch_backups_for_resource_type( + self, + ) -> Callable[ + [backupvault.FetchBackupsForResourceTypeRequest], + backupvault.FetchBackupsForResourceTypeResponse, + ]: + r"""Return a callable for the fetch backups for resource + type method over gRPC. + + Fetch Backups for a given resource type. + + Returns: + Callable[[~.FetchBackupsForResourceTypeRequest], + ~.FetchBackupsForResourceTypeResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_backups_for_resource_type" not in self._stubs: + self._stubs[ + "fetch_backups_for_resource_type" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchBackupsForResourceType", + request_serializer=backupvault.FetchBackupsForResourceTypeRequest.serialize, + response_deserializer=backupvault.FetchBackupsForResourceTypeResponse.deserialize, + ) + return self._stubs["fetch_backups_for_resource_type"] + @property def get_backup( self, @@ -1270,6 +1302,38 @@ def get_data_source_reference( ) return self._stubs["get_data_source_reference"] + @property + def list_data_source_references( + self, + ) -> Callable[ + [datasourcereference.ListDataSourceReferencesRequest], + datasourcereference.ListDataSourceReferencesResponse, + ]: + r"""Return a callable for the list data source references method over gRPC. + + Lists DataSourceReferences for a given project and + location. + + Returns: + Callable[[~.ListDataSourceReferencesRequest], + ~.ListDataSourceReferencesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
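# --- Editor's illustrative note, not part of the generated diff above. ---
# The gRPC stubs added here back the default transport; the REST transport
# changes in rest.py below cover the same RPCs. A sketch of explicit transport
# selection (standard GAPIC client behaviour, assumed rather than shown in this
# change):
from google.cloud import backupdr_v1

grpc_client = backupdr_v1.BackupDRClient()                  # gRPC is the default
rest_client = backupdr_v1.BackupDRClient(transport="rest")  # force the REST transport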
+ if "list_data_source_references" not in self._stubs: + self._stubs[ + "list_data_source_references" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSourceReferences", + request_serializer=datasourcereference.ListDataSourceReferencesRequest.serialize, + response_deserializer=datasourcereference.ListDataSourceReferencesResponse.deserialize, + ) + return self._stubs["list_data_source_references"] + @property def fetch_data_source_references_for_resource_type( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py index 020dc7f55346..1ea85b16888f 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py @@ -759,6 +759,38 @@ def list_backups( ) return self._stubs["list_backups"] + @property + def fetch_backups_for_resource_type( + self, + ) -> Callable[ + [backupvault.FetchBackupsForResourceTypeRequest], + Awaitable[backupvault.FetchBackupsForResourceTypeResponse], + ]: + r"""Return a callable for the fetch backups for resource + type method over gRPC. + + Fetch Backups for a given resource type. + + Returns: + Callable[[~.FetchBackupsForResourceTypeRequest], + Awaitable[~.FetchBackupsForResourceTypeResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_backups_for_resource_type" not in self._stubs: + self._stubs[ + "fetch_backups_for_resource_type" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchBackupsForResourceType", + request_serializer=backupvault.FetchBackupsForResourceTypeRequest.serialize, + response_deserializer=backupvault.FetchBackupsForResourceTypeResponse.deserialize, + ) + return self._stubs["fetch_backups_for_resource_type"] + @property def get_backup( self, @@ -1318,6 +1350,38 @@ def get_data_source_reference( ) return self._stubs["get_data_source_reference"] + @property + def list_data_source_references( + self, + ) -> Callable[ + [datasourcereference.ListDataSourceReferencesRequest], + Awaitable[datasourcereference.ListDataSourceReferencesResponse], + ]: + r"""Return a callable for the list data source references method over gRPC. + + Lists DataSourceReferences for a given project and + location. + + Returns: + Callable[[~.ListDataSourceReferencesRequest], + Awaitable[~.ListDataSourceReferencesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_data_source_references" not in self._stubs: + self._stubs[ + "list_data_source_references" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListDataSourceReferences", + request_serializer=datasourcereference.ListDataSourceReferencesRequest.serialize, + response_deserializer=datasourcereference.ListDataSourceReferencesResponse.deserialize, + ) + return self._stubs["list_data_source_references"] + @property def fetch_data_source_references_for_resource_type( self, @@ -1542,6 +1606,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.fetch_backups_for_resource_type: self._wrap_method( + self.fetch_backups_for_resource_type, + default_timeout=None, + client_info=client_info, + ), self.get_backup: self._wrap_method( self.get_backup, default_retry=retries.AsyncRetry( @@ -1646,6 +1715,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_data_source_references: self._wrap_method( + self.list_data_source_references, + default_timeout=None, + client_info=client_info, + ), self.fetch_data_source_references_for_resource_type: self._wrap_method( self.fetch_data_source_references_for_resource_type, default_timeout=None, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index 0938939fdca7..850a894716f5 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -162,6 +162,14 @@ def post_fetch_backup_plan_associations_for_resource_type(self, response): logging.log(f"Received response: {response}") return response + def pre_fetch_backups_for_resource_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_backups_for_resource_type(self, response): + logging.log(f"Received response: {response}") + return response + def pre_fetch_data_source_references_for_resource_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -290,6 +298,14 @@ def post_list_backup_vaults(self, response): logging.log(f"Received response: {response}") return response + def pre_list_data_source_references(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_source_references(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_data_sources(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -855,6 +871,58 @@ def post_fetch_backup_plan_associations_for_resource_type_with_metadata( """ return response, metadata + def pre_fetch_backups_for_resource_type( + self, + request: backupvault.FetchBackupsForResourceTypeRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupvault.FetchBackupsForResourceTypeRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for fetch_backups_for_resource_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
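# --- Editor's illustrative sketch, not part of the generated diff above. ---
# The pre_/post_ hooks being added to BackupDRRestInterceptor can be overridden
# to observe or rewrite the new RPCs. The subclass and wiring below follow the
# interceptor pattern documented in rest.py; class names and log lines are
# placeholders.
from google.cloud import backupdr_v1
from google.cloud.backupdr_v1.services.backup_dr.transports.rest import (
    BackupDRRestInterceptor,
    BackupDRRestTransport,
)


class LoggingBackupDRInterceptor(BackupDRRestInterceptor):
    def pre_list_data_source_references(self, request, metadata):
        print(f"listing data source references under {request.parent}")
        return request, metadata

    def post_list_data_source_references(self, response):
        print(f"received {len(response.data_source_references)} references")
        return response


transport = BackupDRRestTransport(interceptor=LoggingBackupDRInterceptor())
client = backupdr_v1.BackupDRClient(transport=transport)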
+ """ + return request, metadata + + def post_fetch_backups_for_resource_type( + self, response: backupvault.FetchBackupsForResourceTypeResponse + ) -> backupvault.FetchBackupsForResourceTypeResponse: + """Post-rpc interceptor for fetch_backups_for_resource_type + + DEPRECATED. Please use the `post_fetch_backups_for_resource_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_fetch_backups_for_resource_type` interceptor runs + before the `post_fetch_backups_for_resource_type_with_metadata` interceptor. + """ + return response + + def post_fetch_backups_for_resource_type_with_metadata( + self, + response: backupvault.FetchBackupsForResourceTypeResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupvault.FetchBackupsForResourceTypeResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_backups_for_resource_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_fetch_backups_for_resource_type_with_metadata` + interceptor in new development instead of the `post_fetch_backups_for_resource_type` interceptor. + When both interceptors are used, this `post_fetch_backups_for_resource_type_with_metadata` interceptor runs after the + `post_fetch_backups_for_resource_type` interceptor. The (possibly modified) response returned by + `post_fetch_backups_for_resource_type` will be passed to + `post_fetch_backups_for_resource_type_with_metadata`. + """ + return response, metadata + def pre_fetch_data_source_references_for_resource_type( self, request: datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, @@ -1647,6 +1715,58 @@ def post_list_backup_vaults_with_metadata( """ return response, metadata + def pre_list_data_source_references( + self, + request: datasourcereference.ListDataSourceReferencesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datasourcereference.ListDataSourceReferencesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_data_source_references + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_data_source_references( + self, response: datasourcereference.ListDataSourceReferencesResponse + ) -> datasourcereference.ListDataSourceReferencesResponse: + """Post-rpc interceptor for list_data_source_references + + DEPRECATED. Please use the `post_list_data_source_references_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_list_data_source_references` interceptor runs + before the `post_list_data_source_references_with_metadata` interceptor. 
+ """ + return response + + def post_list_data_source_references_with_metadata( + self, + response: datasourcereference.ListDataSourceReferencesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datasourcereference.ListDataSourceReferencesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_data_source_references + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_data_source_references_with_metadata` + interceptor in new development instead of the `post_list_data_source_references` interceptor. + When both interceptors are used, this `post_list_data_source_references_with_metadata` interceptor runs after the + `post_list_data_source_references` interceptor. The (possibly modified) response returned by + `post_list_data_source_references` will be passed to + `post_list_data_source_references_with_metadata`. + """ + return response, metadata + def pre_list_data_sources( self, request: backupvault.ListDataSourcesRequest, @@ -3981,6 +4101,163 @@ def __call__( ) return resp + class _FetchBackupsForResourceType( + _BaseBackupDRRestTransport._BaseFetchBackupsForResourceType, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.FetchBackupsForResourceType") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backupvault.FetchBackupsForResourceTypeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backupvault.FetchBackupsForResourceTypeResponse: + r"""Call the fetch backups for + resource type method over HTTP. + + Args: + request (~.backupvault.FetchBackupsForResourceTypeRequest): + The request object. Request for the + FetchBackupsForResourceType method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backupvault.FetchBackupsForResourceTypeResponse: + Response for the + FetchBackupsForResourceType method. 
+ + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseFetchBackupsForResourceType._get_http_options() + ) + + request, metadata = self._interceptor.pre_fetch_backups_for_resource_type( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseFetchBackupsForResourceType._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseFetchBackupsForResourceType._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.FetchBackupsForResourceType", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "FetchBackupsForResourceType", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._FetchBackupsForResourceType._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.FetchBackupsForResourceTypeResponse() + pb_resp = backupvault.FetchBackupsForResourceTypeResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_fetch_backups_for_resource_type(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_fetch_backups_for_resource_type_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + backupvault.FetchBackupsForResourceTypeResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.fetch_backups_for_resource_type", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "FetchBackupsForResourceType", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _FetchDataSourceReferencesForResourceType( _BaseBackupDRRestTransport._BaseFetchDataSourceReferencesForResourceType, BackupDRRestStub, @@ -6426,6 +6703,160 @@ def __call__( ) return resp + class _ListDataSourceReferences( + _BaseBackupDRRestTransport._BaseListDataSourceReferences, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.ListDataSourceReferences") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = 
getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: datasourcereference.ListDataSourceReferencesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datasourcereference.ListDataSourceReferencesResponse: + r"""Call the list data source + references method over HTTP. + + Args: + request (~.datasourcereference.ListDataSourceReferencesRequest): + The request object. Request for the + ListDataSourceReferences method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.datasourcereference.ListDataSourceReferencesResponse: + Response for the + ListDataSourceReferences method. + + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseListDataSourceReferences._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_data_source_references( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseListDataSourceReferences._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseListDataSourceReferences._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListDataSourceReferences", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "ListDataSourceReferences", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._ListDataSourceReferences._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datasourcereference.ListDataSourceReferencesResponse() + pb_resp = datasourcereference.ListDataSourceReferencesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_data_source_references(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_source_references_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + datasourcereference.ListDataSourceReferencesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_data_source_references", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "ListDataSourceReferences", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _ListDataSources( _BaseBackupDRRestTransport._BaseListDataSources, BackupDRRestStub ): @@ -7915,6 +8346,17 @@ def fetch_backup_plan_associations_for_resource_type( # In C++ this would require a dynamic_cast return self._FetchBackupPlanAssociationsForResourceType(self._session, self._host, self._interceptor) # type: ignore + @property + def fetch_backups_for_resource_type( + self, + ) -> Callable[ + [backupvault.FetchBackupsForResourceTypeRequest], + backupvault.FetchBackupsForResourceTypeResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchBackupsForResourceType(self._session, self._host, self._interceptor) # type: ignore + @property def fetch_data_source_references_for_resource_type( self, @@ -8067,6 +8509,17 @@ def list_backup_vaults( # In C++ this would require a dynamic_cast return self._ListBackupVaults(self._session, self._host, self._interceptor) # type: ignore + @property + def list_data_source_references( + self, + ) -> Callable[ + [datasourcereference.ListDataSourceReferencesRequest], + datasourcereference.ListDataSourceReferencesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDataSourceReferences(self._session, self._host, self._interceptor) # type: ignore + @property def list_data_sources( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py index 88c0190195f2..8d686adec184 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py @@ -623,6 +623,55 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseFetchBackupsForResourceType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "resourceType": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups:fetchForResourceType", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backupvault.FetchBackupsForResourceTypeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseFetchBackupsForResourceType._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseFetchDataSourceReferencesForResourceType: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1395,6 +1444,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListDataSourceReferences: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dataSourceReferences", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datasourcereference.ListDataSourceReferencesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseListDataSourceReferences._get_unset_required_fields( + 
query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListDataSources: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index 1c31717b1f55..19a484a7e061 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -64,6 +64,7 @@ BackupApplianceLockInfo, BackupConfigInfo, BackupConfigState, + BackupGcpResource, BackupLock, BackupVault, BackupVaultView, @@ -74,6 +75,8 @@ DataSourceGcpResource, DeleteBackupRequest, DeleteBackupVaultRequest, + FetchBackupsForResourceTypeRequest, + FetchBackupsForResourceTypeResponse, FetchUsableBackupVaultsRequest, FetchUsableBackupVaultsResponse, GcpBackupConfig, @@ -143,6 +146,8 @@ FetchDataSourceReferencesForResourceTypeRequest, FetchDataSourceReferencesForResourceTypeResponse, GetDataSourceReferenceRequest, + ListDataSourceReferencesRequest, + ListDataSourceReferencesResponse, ) __all__ = ( @@ -189,6 +194,7 @@ "BackupApplianceBackupConfig", "BackupApplianceLockInfo", "BackupConfigInfo", + "BackupGcpResource", "BackupLock", "BackupVault", "CreateBackupVaultRequest", @@ -197,6 +203,8 @@ "DataSourceGcpResource", "DeleteBackupRequest", "DeleteBackupVaultRequest", + "FetchBackupsForResourceTypeRequest", + "FetchBackupsForResourceTypeResponse", "FetchUsableBackupVaultsRequest", "FetchUsableBackupVaultsResponse", "GcpBackupConfig", @@ -261,4 +269,6 @@ "FetchDataSourceReferencesForResourceTypeRequest", "FetchDataSourceReferencesForResourceTypeResponse", "GetDataSourceReferenceRequest", + "ListDataSourceReferencesRequest", + "ListDataSourceReferencesResponse", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py index 1798a7392812..bf0204de787d 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py @@ -52,6 +52,8 @@ "ListBackupVaultsResponse", "FetchUsableBackupVaultsRequest", "FetchUsableBackupVaultsResponse", + "FetchBackupsForResourceTypeRequest", + "FetchBackupsForResourceTypeResponse", "GetBackupVaultRequest", "UpdateBackupVaultRequest", "DeleteBackupVaultRequest", @@ -68,6 +70,7 @@ "RestoreBackupResponse", "TargetResource", "GcpResource", + "BackupGcpResource", }, ) @@ -1078,6 +1081,11 @@ class Backup(proto.Message): use. This field is a member of `oneof`_ ``_satisfies_pzi``. + gcp_resource (google.cloud.backupdr_v1.types.BackupGcpResource): + Output only. Unique identifier of the GCP + resource that is being backed up. + + This field is a member of `oneof`_ ``source_resource``. """ class State(proto.Enum): @@ -1281,6 +1289,12 @@ class GCPBackupPlanInfo(proto.Message): number=25, optional=True, ) + gcp_resource: "BackupGcpResource" = proto.Field( + proto.MESSAGE, + number=31, + oneof="source_resource", + message="BackupGcpResource", + ) class CreateBackupVaultRequest(proto.Message): @@ -1527,6 +1541,106 @@ def raw_page(self): ) +class FetchBackupsForResourceTypeRequest(proto.Message): + r"""Request for the FetchBackupsForResourceType method. + + Attributes: + parent (str): + Required. Datasources are the parent resource + for the backups. 
Format: + + projects/{project}/locations/{location}/backupVaults/{backupVaultId}/dataSources/{datasourceId} + resource_type (str): + Required. The type of the GCP resource. + Ex: sqladmin.googleapis.com/Instance + page_size (int): + Optional. The maximum number of Backups to + return. The service may return fewer than this + value. If unspecified, at most 50 Backups will + be returned. The maximum value is 100; values + above 100 will be coerced to 100. + page_token (str): + Optional. A page token, received from a previous call of + ``FetchBackupsForResourceType``. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``FetchBackupsForResourceType`` must match the call that + provided the page token. + filter (str): + Optional. A filter expression that filters + the results fetched in the response. The + expression must specify the field name, a + comparison operator, and the value that you want + to use for filtering. Supported fields: + order_by (str): + Optional. A comma-separated list of fields to + order by, sorted in ascending order. Use "desc" + after a field name for descending. + view (google.cloud.backupdr_v1.types.BackupView): + Optional. This parameter is used to specify + the view of the backup. If not specified, the + default view is BASIC. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + view: "BackupView" = proto.Field( + proto.ENUM, + number=7, + enum="BackupView", + ) + + +class FetchBackupsForResourceTypeResponse(proto.Message): + r"""Response for the FetchBackupsForResourceType method. + + Attributes: + backups (MutableSequence[google.cloud.backupdr_v1.types.Backup]): + The Backups from the specified parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + backups: MutableSequence["Backup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Backup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + class GetBackupVaultRequest(proto.Message): r"""Request message for getting a BackupVault. @@ -2246,4 +2360,34 @@ class GcpResource(proto.Message): ) +class BackupGcpResource(proto.Message): + r"""Minimum details to identify a Google Cloud resource for a + backup. + + Attributes: + gcp_resourcename (str): + Name of the Google Cloud resource. + location (str): + Location of the resource: + //"global"/"unspecified". + type_ (str): + Type of the resource. Use the Unified + Resource Type, eg. + compute.googleapis.com/Instance. 
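# --- Editor's illustrative sketch, not part of the generated diff above. ---
# Building the new FetchBackupsForResourceTypeRequest documented above. Resource
# names are placeholders; the request type is assumed to be re-exported at the
# backupdr_v1 package level like the other message types, and the client-side
# fetch_backups_for_resource_type method is assumed to mirror the transport
# methods added in this change.
from google.cloud import backupdr_v1

client = backupdr_v1.BackupDRClient()

request = backupdr_v1.FetchBackupsForResourceTypeRequest(
    parent=(
        "projects/my-project/locations/us-central1/"
        "backupVaults/my-vault/dataSources/my-datasource"
    ),
    resource_type="sqladmin.googleapis.com/Instance",
    page_size=50,
)

# The response is paged; the pager added in pagers.py iterates the backups.
for backup in client.fetch_backups_for_resource_type(request=request):
    print(backup.name)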
+ """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_cloudsql.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_cloudsql.py index 559ff3aacf47..f34bde2f8946 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_cloudsql.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_cloudsql.py @@ -89,9 +89,13 @@ class CloudSqlInstanceBackupProperties(proto.Message): backup. Format: projects/{project}/instances/{instance} + instance_create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instance creation timestamp. instance_tier (str): Output only. The tier (or machine type) for this instance. Example: ``db-custom-1-3840`` + instance_delete_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The instance delete timestamp. """ database_installed_version: str = proto.Field( @@ -106,10 +110,20 @@ class CloudSqlInstanceBackupProperties(proto.Message): proto.STRING, number=4, ) + instance_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) instance_tier: str = proto.Field( proto.STRING, number=6, ) + instance_delete_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) class CloudSqlInstanceDataSourceReferenceProperties(proto.Message): diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/datasourcereference.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/datasourcereference.py index ef92d89447e4..722c502a6c86 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/datasourcereference.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/datasourcereference.py @@ -29,6 +29,8 @@ "DataSourceBackupConfigInfo", "DataSourceGcpResourceInfo", "GetDataSourceReferenceRequest", + "ListDataSourceReferencesRequest", + "ListDataSourceReferencesResponse", "FetchDataSourceReferencesForResourceTypeRequest", "FetchDataSourceReferencesForResourceTypeResponse", }, @@ -38,6 +40,8 @@ class DataSourceReference(proto.Message): r"""DataSourceReference is a reference to a DataSource resource. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Identifier. The resource name of the DataSourceReference. @@ -63,6 +67,12 @@ class DataSourceReference(proto.Message): data_source_gcp_resource_info (google.cloud.backupdr_v1.types.DataSourceGcpResourceInfo): Output only. The GCP resource that the DataSource is associated with. + total_stored_bytes (int): + Output only. Total size of the storage used + by all backup resources for the referenced + datasource. + + This field is a member of `oneof`_ ``_total_stored_bytes``. 
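# --- Editor's illustrative sketch, not part of the generated diff above. ---
# Reading the new Backup.gcp_resource field, which belongs to the
# `source_resource` oneof defined above. The presence check uses the standard
# proto-plus `in` operator; the backup object is assumed to come from one of
# the list/fetch calls.
from google.cloud import backupdr_v1


def describe_source_resource(backup: backupdr_v1.Backup) -> str:
    if "gcp_resource" in backup:
        res = backup.gcp_resource
        return f"{res.type_} {res.gcp_resourcename} ({res.location})"
    return "no GCP source resource recorded on this backup"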
""" name: str = proto.Field( @@ -97,6 +107,11 @@ class DataSourceReference(proto.Message): number=7, message="DataSourceGcpResourceInfo", ) + total_stored_bytes: int = proto.Field( + proto.INT64, + number=8, + optional=True, + ) class DataSourceBackupConfigInfo(proto.Message): @@ -183,6 +198,102 @@ class GetDataSourceReferenceRequest(proto.Message): ) +class ListDataSourceReferencesRequest(proto.Message): + r"""Request for the ListDataSourceReferences method. + + Attributes: + parent (str): + Required. The parent resource name. + Format: projects/{project}/locations/{location} + page_size (int): + Optional. The maximum number of + DataSourceReferences to return. The service may + return fewer than this value. If unspecified, at + most 50 DataSourceReferences will be returned. + The maximum value is 100; values above 100 will + be coerced to 100. + page_token (str): + Optional. A page token, received from a previous + ``ListDataSourceReferences`` call. Provide this to retrieve + the subsequent page. + + When paginating, all other parameters provided to + ``ListDataSourceReferences`` must match the call that + provided the page token. + filter (str): + Optional. A filter expression that filters the results + listed in the response. The expression must specify the + field name, a comparison operator, and the value that you + want to use for filtering. + + The following field and operator combinations are supported: + + - data_source_gcp_resource_info.gcp_resourcename with ``=``, + ``!=`` + - data_source_gcp_resource_info.type with ``=``, ``!=`` + order_by (str): + Optional. A comma-separated list of fields to order by, + sorted in ascending order. Use "desc" after a field name for + descending. + + Supported fields: + + - data_source + - data_source_gcp_resource_info.gcp_resourcename + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataSourceReferencesResponse(proto.Message): + r"""Response for the ListDataSourceReferences method. + + Attributes: + data_source_references (MutableSequence[google.cloud.backupdr_v1.types.DataSourceReference]): + The DataSourceReferences from the specified + parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + data_source_references: MutableSequence[ + "DataSourceReference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSourceReference", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + class FetchDataSourceReferencesForResourceTypeRequest(proto.Message): r"""Request for the FetchDataSourceReferencesForResourceType method. diff --git a/packages/google-cloud-backupdr/noxfile.py b/packages/google-cloud-backupdr/noxfile.py index 80a77eb1727c..534835ace6aa 100644 --- a/packages/google-cloud-backupdr/noxfile.py +++ b/packages/google-cloud-backupdr/noxfile.py @@ -27,6 +27,10 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py index 5b08ba3c2ba5..fa5ed0f66d24 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_async.py @@ -60,4 +60,5 @@ async def sample_create_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py index 3f0dbdfb9719..e2ee33d7a304 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_association_sync.py @@ -60,4 +60,5 @@ def sample_create_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py index 811ca24ebe8e..6ef520b8f0a8 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_async.py @@ -65,4 +65,5 @@ async def sample_create_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py index bdc722c10954..f1f5b6380494 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_plan_sync.py @@ -65,4 +65,5 @@ def sample_create_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py index 85184254e6b5..8cb945f6a395 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_async.py @@ -54,4 +54,5 @@ async def 
sample_create_backup_vault(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py index 95eac744841e..fad28a0c74e4 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_backup_vault_sync.py @@ -54,4 +54,5 @@ def sample_create_backup_vault(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_async.py index 47152d8c1544..48ac3d2393c7 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_async.py @@ -54,4 +54,5 @@ async def sample_create_management_server(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateManagementServer_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_sync.py index d452cb3efd8d..18559b7f4ae1 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_create_management_server_sync.py @@ -54,4 +54,5 @@ def sample_create_management_server(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_CreateManagementServer_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py index 0f314054acd2..6f20cc8e1b13 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_async.py @@ -53,4 +53,5 @@ async def sample_delete_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py index fc68dc972d08..768c4bce1866 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_async.py @@ -53,4 +53,5 @@ async def 
sample_delete_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py index 08d7d74502be..6b6b7861fc4b 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_association_sync.py @@ -53,4 +53,5 @@ def sample_delete_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py index f179e8e6cd7c..f93de9e06380 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_async.py @@ -53,4 +53,5 @@ async def sample_delete_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py index dd81a1484793..5e29346317c0 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_plan_sync.py @@ -53,4 +53,5 @@ def sample_delete_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py index 932899e40eed..4c0769d616d2 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_sync.py @@ -53,4 +53,5 @@ def sample_delete_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py index c0ec5815b491..115601562680 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_async.py @@ -53,4 +53,5 @@ async def sample_delete_backup_vault(): # Handle the 
response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py index 443060af21aa..10ac12b21c43 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_backup_vault_sync.py @@ -53,4 +53,5 @@ def sample_delete_backup_vault(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_async.py index 0edd2a7fd936..429c1fa58402 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_async.py @@ -53,4 +53,5 @@ async def sample_delete_management_server(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteManagementServer_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_sync.py index 41b53b4d4ade..f16bd8e86ed5 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_delete_management_server_sync.py @@ -53,4 +53,5 @@ def sample_delete_management_server(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_DeleteManagementServer_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py index 79c337018dc6..e727cbd01e9b 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py @@ -45,10 +45,13 @@ async def sample_fetch_backup_plan_associations_for_resource_type(): ) # Make the request - page_result = client.fetch_backup_plan_associations_for_resource_type(request=request) + page_result = client.fetch_backup_plan_associations_for_resource_type( + request=request + ) # Handle the response async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py index 5b1b473b59e8..a0ee9eb76b1d 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py @@ -45,10 +45,13 @@ def sample_fetch_backup_plan_associations_for_resource_type(): ) # Make the request - page_result = client.fetch_backup_plan_associations_for_resource_type(request=request) + page_result = client.fetch_backup_plan_associations_for_resource_type( + request=request + ) # Handle the response for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_async.py new file mode 100644 index 000000000000..361659cc36c2 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchBackupsForResourceType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchBackupsForResourceType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_fetch_backups_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchBackupsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_backups_for_resource_type(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END backupdr_v1_generated_BackupDR_FetchBackupsForResourceType_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_sync.py new file mode 100644 index 000000000000..402a261a9a6a --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchBackupsForResourceType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchBackupsForResourceType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_fetch_backups_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchBackupsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_backups_for_resource_type(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END backupdr_v1_generated_BackupDR_FetchBackupsForResourceType_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py index 43838143c70c..2f31657dbe9d 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py @@ -51,4 +51,5 @@ async def sample_fetch_data_source_references_for_resource_type(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py index 3ba0f77a07e5..07db5c2e68b0 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py @@ -51,4 +51,5 @@ def sample_fetch_data_source_references_for_resource_type(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py index b0ef45fcbc46..4dff4804de6c 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py @@ -50,4 +50,5 @@ async def sample_fetch_usable_backup_vaults(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py index 136840f477df..66b62fbdbf59 100644 --- 
a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py @@ -50,4 +50,5 @@ def sample_fetch_usable_backup_vaults(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py index 11e966e3b29c..f501bee80cb3 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_async.py @@ -49,4 +49,5 @@ async def sample_get_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py index 853eaae17a83..ecf5c2cf0ccf 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py @@ -49,4 +49,5 @@ async def sample_get_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py index b28f30a16894..4d01fb6a05fe 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py @@ -49,4 +49,5 @@ def sample_get_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py index 7b6e75c07bdb..779f1af6c6db 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_async.py @@ -49,4 +49,5 @@ async def sample_get_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py index 64e827830c04..f8d1c8f0a9d2 100644 --- 
a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py @@ -49,4 +49,5 @@ async def sample_get_backup_plan_revision(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupPlanRevision_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py index cf3c058e747e..a373af746664 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py @@ -49,4 +49,5 @@ def sample_get_backup_plan_revision(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupPlanRevision_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py index 1a8aec29d169..a6e02c3b52e4 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_sync.py @@ -49,4 +49,5 @@ def sample_get_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py index e4e88a9b9d9c..d0a8d34356aa 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_sync.py @@ -49,4 +49,5 @@ def sample_get_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py index 62b0e487318a..1a609ed29b19 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_async.py @@ -49,4 +49,5 @@ async def sample_get_backup_vault(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py index 939097ee61d3..36d0ee072b2e 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py +++ 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_vault_sync.py @@ -49,4 +49,5 @@ def sample_get_backup_vault(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py index 2dc60d9ec61b..c5153e04e351 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_async.py @@ -49,4 +49,5 @@ async def sample_get_data_source(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_async.py index 47a19246fa69..4f232759b045 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_async.py @@ -49,4 +49,5 @@ async def sample_get_data_source_reference(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetDataSourceReference_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py index 8172d71f1a12..d9fb2f2a1492 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py @@ -49,4 +49,5 @@ def sample_get_data_source_reference(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetDataSourceReference_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py index 699f00f6b791..b4fb2785dbdc 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_sync.py @@ -49,4 +49,5 @@ def sample_get_data_source(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_async.py index a27dab1f1352..9cdb5bfb7911 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_async.py +++ 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_async.py @@ -49,4 +49,5 @@ async def sample_get_management_server(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetManagementServer_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_sync.py index b1972372ec39..01e9cb5a0389 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_management_server_sync.py @@ -49,4 +49,5 @@ def sample_get_management_server(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_GetManagementServer_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py index 204a26bb960e..dafe724a48c6 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py @@ -39,7 +39,9 @@ async def sample_initialize_service(): client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - cloud_sql_instance_initialization_config = backupdr_v1.CloudSqlInstanceInitializationConfig() + cloud_sql_instance_initialization_config = ( + backupdr_v1.CloudSqlInstanceInitializationConfig() + ) cloud_sql_instance_initialization_config.edition = "ENTERPRISE_PLUS" request = backupdr_v1.InitializeServiceRequest( @@ -58,4 +60,5 @@ async def sample_initialize_service(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_InitializeService_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py index 2bb388010b2f..3c92f0d54340 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py @@ -39,7 +39,9 @@ def sample_initialize_service(): client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - cloud_sql_instance_initialization_config = backupdr_v1.CloudSqlInstanceInitializationConfig() + cloud_sql_instance_initialization_config = ( + backupdr_v1.CloudSqlInstanceInitializationConfig() + ) cloud_sql_instance_initialization_config.edition = "ENTERPRISE_PLUS" request = backupdr_v1.InitializeServiceRequest( @@ -58,4 +60,5 @@ def sample_initialize_service(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_InitializeService_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py index 
0110b00274f2..b404ada0c17d 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py @@ -50,4 +50,5 @@ async def sample_list_backup_plan_associations(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py index 5fccfddfd564..f5573eb2224e 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py @@ -50,4 +50,5 @@ def sample_list_backup_plan_associations(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py index 4bfa7c4e57ad..f36e8c567868 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py @@ -50,4 +50,5 @@ async def sample_list_backup_plan_revisions(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py index 61bdb80276bd..28d37fff63e6 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py @@ -50,4 +50,5 @@ def sample_list_backup_plan_revisions(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py index e89dfebf5ea8..30307de21eef 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_async.py @@ -50,4 +50,5 @@ async def sample_list_backup_plans(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupPlans_async] diff --git 
a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py index 98dc56061c82..2ba5bbbff9d0 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plans_sync.py @@ -50,4 +50,5 @@ def sample_list_backup_plans(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupPlans_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py index e2c5e149a3f7..e94be8421c97 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_async.py @@ -50,4 +50,5 @@ async def sample_list_backup_vaults(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupVaults_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py index 3190f19065a4..44bee1bfe677 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py @@ -50,4 +50,5 @@ def sample_list_backup_vaults(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackupVaults_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py index e5f3b18ce3a3..884ff2268ca9 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_async.py @@ -50,4 +50,5 @@ async def sample_list_backups(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackups_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py index 26d4bed230e8..e294a492883f 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backups_sync.py @@ -50,4 +50,5 @@ def sample_list_backups(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListBackups_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_source_references_async.py 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_source_references_async.py new file mode 100644 index 000000000000..728e2d92d7d2 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_source_references_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSourceReferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSourceReferences_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_data_source_references(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourceReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_source_references(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END backupdr_v1_generated_BackupDR_ListDataSourceReferences_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_source_references_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_source_references_sync.py new file mode 100644 index 000000000000..dae37dfc12f4 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_source_references_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataSourceReferences +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListDataSourceReferences_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_data_source_references(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListDataSourceReferencesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_source_references(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END backupdr_v1_generated_BackupDR_ListDataSourceReferences_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py index 85c11346a83e..1532973964ea 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_async.py @@ -50,4 +50,5 @@ async def sample_list_data_sources(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListDataSources_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py index d9f8f484caf4..b10542abe24f 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_data_sources_sync.py @@ -50,4 +50,5 @@ def sample_list_data_sources(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListDataSources_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_async.py index 176d42f7fab0..59c412c8f106 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_async.py @@ -50,4 +50,5 @@ async def sample_list_management_servers(): async for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListManagementServers_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_sync.py index 
c924c2a02dc1..310a0b854e7c 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_management_servers_sync.py @@ -50,4 +50,5 @@ def sample_list_management_servers(): for response in page_result: print(response) + # [END backupdr_v1_generated_BackupDR_ListManagementServers_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py index 956967baa110..2d18500fdef6 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_async.py @@ -58,4 +58,5 @@ async def sample_restore_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_RestoreBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py index 26c774d90578..0d480e287205 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_restore_backup_sync.py @@ -58,4 +58,5 @@ def sample_restore_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_RestoreBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py index d86d87769fb3..99fa8dd19ea0 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_async.py @@ -54,4 +54,5 @@ async def sample_trigger_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_TriggerBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py index 3bb8bc84f281..5efb01e30fd7 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_trigger_backup_sync.py @@ -54,4 +54,5 @@ def sample_trigger_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_TriggerBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py index 453910f810f0..ebad1a29868b 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py +++ 
b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_async.py @@ -39,8 +39,7 @@ async def sample_update_backup(): client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = backupdr_v1.UpdateBackupRequest( - ) + request = backupdr_v1.UpdateBackupRequest() # Make the request operation = client.update_backup(request=request) @@ -52,4 +51,5 @@ async def sample_update_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackup_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py index 772f73ee5782..7505045757cd 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py @@ -58,4 +58,5 @@ async def sample_update_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py index 2d99f9bdbdec..2ace64a0d6aa 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py @@ -58,4 +58,5 @@ def sample_update_backup_plan_association(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_async.py index 9e2997d0929b..542e12c0b9a6 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_async.py @@ -63,4 +63,5 @@ async def sample_update_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_sync.py index 0aaa931b12ce..76c8b8ea0c53 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_sync.py @@ -63,4 +63,5 @@ def sample_update_backup_plan(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackupPlan_sync] diff --git 
a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py index 205fde499c00..c3360327c784 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_sync.py @@ -39,8 +39,7 @@ def sample_update_backup(): client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = backupdr_v1.UpdateBackupRequest( - ) + request = backupdr_v1.UpdateBackupRequest() # Make the request operation = client.update_backup(request=request) @@ -52,4 +51,5 @@ def sample_update_backup(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackup_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py index f96aebe03455..bfcd6b490def 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_async.py @@ -39,8 +39,7 @@ async def sample_update_backup_vault(): client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = backupdr_v1.UpdateBackupVaultRequest( - ) + request = backupdr_v1.UpdateBackupVaultRequest() # Make the request operation = client.update_backup_vault(request=request) @@ -52,4 +51,5 @@ async def sample_update_backup_vault(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackupVault_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py index 6ec903fd1e1b..54092571f50a 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_vault_sync.py @@ -39,8 +39,7 @@ def sample_update_backup_vault(): client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = backupdr_v1.UpdateBackupVaultRequest( - ) + request = backupdr_v1.UpdateBackupVaultRequest() # Make the request operation = client.update_backup_vault(request=request) @@ -52,4 +51,5 @@ def sample_update_backup_vault(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateBackupVault_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py index cd4beb31579a..3f3f54a75abb 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_async.py @@ -39,8 +39,7 @@ async def sample_update_data_source(): client = 
backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = backupdr_v1.UpdateDataSourceRequest( - ) + request = backupdr_v1.UpdateDataSourceRequest() # Make the request operation = client.update_data_source(request=request) @@ -52,4 +51,5 @@ async def sample_update_data_source(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateDataSource_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py index b5cfbd7bb267..b0ae3385b921 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_data_source_sync.py @@ -39,8 +39,7 @@ def sample_update_data_source(): client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = backupdr_v1.UpdateDataSourceRequest( - ) + request = backupdr_v1.UpdateDataSourceRequest() # Make the request operation = client.update_data_source(request=request) @@ -52,4 +51,5 @@ def sample_update_data_source(): # Handle the response print(response) + # [END backupdr_v1_generated_BackupDR_UpdateDataSource_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index a2c9825a0c20..d5007b677f54 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -1693,6 +1693,175 @@ ], "title": "backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_backups_for_resource_type", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchBackupsForResourceType", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchBackupsForResourceType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "resource_type", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupsForResourceTypeAsyncPager", + "shortName": "fetch_backups_for_resource_type" + }, + "description": "Sample for FetchBackupsForResourceType", + "file": "backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchBackupsForResourceType_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_backups_for_resource_type", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchBackupsForResourceType", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "FetchBackupsForResourceType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.FetchBackupsForResourceTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "resource_type", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupsForResourceTypePager", + "shortName": "fetch_backups_for_resource_type" + }, + "description": "Sample for FetchBackupsForResourceType", + "file": "backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_FetchBackupsForResourceType_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_fetch_backups_for_resource_type_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4269,6 +4438,167 @@ ], "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_source_references", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSourceReferences", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSourceReferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourceReferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourceReferencesAsyncPager", + "shortName": "list_data_source_references" + }, + "description": "Sample for ListDataSourceReferences", + "file": "backupdr_v1_generated_backup_dr_list_data_source_references_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_ListDataSourceReferences_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_source_references_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_data_source_references", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSourceReferences", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSourceReferences" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourceReferencesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourceReferencesPager", + "shortName": "list_data_source_references" + }, + "description": "Sample for ListDataSourceReferences", + "file": "backupdr_v1_generated_backup_dr_list_data_source_references_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSourceReferences_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_source_references_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py index 7ccbfa21317b..297e70e70e74 100644 --- a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py +++ b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py @@ -49,6 +49,7 @@ class backupdrCallTransformer(cst.CSTTransformer): 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', 'ignore_backup_plan_references', ), 'delete_management_server': ('name', 'request_id', ), 'fetch_backup_plan_associations_for_resource_type': ('parent', 'resource_type', 'page_size', 'page_token', 'filter', 'order_by', ), + 'fetch_backups_for_resource_type': ('parent', 'resource_type', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), 'fetch_data_source_references_for_resource_type': ('parent', 'resource_type', 'page_size', 'page_token', 'filter', 'order_by', ), 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'get_backup': ('name', 'view', ), @@ -65,6 +66,7 @@ class backupdrCallTransformer(cst.CSTTransformer): 'list_backup_plans': 
('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), + 'list_data_source_references': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_data_sources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_management_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'disk_target_environment', 'region_disk_target_environment', 'compute_instance_restore_properties', 'disk_restore_properties', ), diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 31dd03eb7a06..eb0e320baa43 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -7050,11 +7050,11 @@ async def test_list_backups_async_pages(): @pytest.mark.parametrize( "request_type", [ - backupvault.GetBackupRequest, + backupvault.FetchBackupsForResourceTypeRequest, dict, ], ) -def test_get_backup(request_type, transport: str = "grpc"): +def test_fetch_backups_for_resource_type(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7065,39 +7065,27 @@ def test_get_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = backupvault.Backup( - name="name_value", - description="description_value", - etag="etag_value", - state=backupvault.Backup.State.CREATING, - backup_type=backupvault.Backup.BackupType.SCHEDULED, - resource_size_bytes=2056, - satisfies_pzs=True, - satisfies_pzi=True, + call.return_value = backupvault.FetchBackupsForResourceTypeResponse( + next_page_token="next_page_token_value", ) - response = client.get_backup(request) + response = client.fetch_backups_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupvault.GetBackupRequest() + request = backupvault.FetchBackupsForResourceTypeRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.Backup) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.state == backupvault.Backup.State.CREATING - assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED - assert response.resource_size_bytes == 2056 - assert response.satisfies_pzs is True - assert response.satisfies_pzi is True + assert isinstance(response, pagers.FetchBackupsForResourceTypePager) + assert response.next_page_token == "next_page_token_value" -def test_get_backup_non_empty_request_with_auto_populated_field(): +def test_fetch_backups_for_resource_type_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -7108,24 +7096,34 @@ def test_get_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupvault.GetBackupRequest( - name="name_value", + request = backupvault.FetchBackupsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_backup(request=request) + client.fetch_backups_for_resource_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.GetBackupRequest( - name="name_value", + assert args[0] == backupvault.FetchBackupsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_backup_use_cached_wrapped_rpc(): +def test_fetch_backups_for_resource_type_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7139,21 +7137,26 @@ def test_get_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup in client._transport._wrapped_methods + assert ( + client._transport.fetch_backups_for_resource_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.fetch_backups_for_resource_type + ] = mock_rpc request = {} - client.get_backup(request) + client.fetch_backups_for_resource_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_backup(request) + client.fetch_backups_for_resource_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7161,7 +7164,9 @@ def test_get_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): +async def test_fetch_backups_for_resource_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7176,7 +7181,7 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as # Ensure method has been cached assert ( - client._client._transport.get_backup + client._client._transport.fetch_backups_for_resource_type in client._client._transport._wrapped_methods ) @@ -7184,16 +7189,16 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup + client._client._transport.fetch_backups_for_resource_type ] = mock_rpc request = {} - await client.get_backup(request) + await client.fetch_backups_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_backup(request) + await client.fetch_backups_for_resource_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7201,8 +7206,9 @@ async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio -async def test_get_backup_async( - transport: str = "grpc_asyncio", request_type=backupvault.GetBackupRequest +async def test_fetch_backups_for_resource_type_async( + transport: str = "grpc_asyncio", + request_type=backupvault.FetchBackupsForResourceTypeRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -7214,60 +7220,50 @@ async def test_get_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.Backup( - name="name_value", - description="description_value", - etag="etag_value", - state=backupvault.Backup.State.CREATING, - backup_type=backupvault.Backup.BackupType.SCHEDULED, - resource_size_bytes=2056, - satisfies_pzs=True, - satisfies_pzi=True, + backupvault.FetchBackupsForResourceTypeResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_backup(request) + response = await client.fetch_backups_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupvault.GetBackupRequest() + request = backupvault.FetchBackupsForResourceTypeRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.Backup) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.state == backupvault.Backup.State.CREATING - assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED - assert response.resource_size_bytes == 2056 - assert response.satisfies_pzs is True - assert response.satisfies_pzi is True + assert isinstance(response, pagers.FetchBackupsForResourceTypeAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_backup_async_from_dict(): - await test_get_backup_async(request_type=dict) +async def test_fetch_backups_for_resource_type_async_from_dict(): + await test_fetch_backups_for_resource_type_async(request_type=dict) -def test_get_backup_field_headers(): +def test_fetch_backups_for_resource_type_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupvault.GetBackupRequest() + request = backupvault.FetchBackupsForResourceTypeRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = backupvault.Backup() - client.get_backup(request) + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: + call.return_value = backupvault.FetchBackupsForResourceTypeResponse() + client.fetch_backups_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7278,26 +7274,30 @@ def test_get_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_backup_field_headers_async(): +async def test_fetch_backups_for_resource_type_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupvault.GetBackupRequest() + request = backupvault.FetchBackupsForResourceTypeRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) - await client.get_backup(request) + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchBackupsForResourceTypeResponse() + ) + await client.fetch_backups_for_resource_type(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -7308,35 +7308,41 @@ async def test_get_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_backup_flattened(): +def test_fetch_backups_for_resource_type_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = backupvault.Backup() + call.return_value = backupvault.FetchBackupsForResourceTypeResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_backup( - name="name_value", + client.fetch_backups_for_resource_type( + parent="parent_value", + resource_type="resource_type_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" assert arg == mock_val -def test_get_backup_flattened_error(): +def test_fetch_backups_for_resource_type_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7344,41 +7350,50 @@ def test_get_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup( - backupvault.GetBackupRequest(), - name="name_value", + client.fetch_backups_for_resource_type( + backupvault.FetchBackupsForResourceTypeRequest(), + parent="parent_value", + resource_type="resource_type_value", ) @pytest.mark.asyncio -async def test_get_backup_flattened_async(): +async def test_fetch_backups_for_resource_type_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = backupvault.Backup() + call.return_value = backupvault.FetchBackupsForResourceTypeResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchBackupsForResourceTypeResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_backup( - name="name_value", + response = await client.fetch_backups_for_resource_type( + parent="parent_value", + resource_type="resource_type_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" assert arg == mock_val @pytest.mark.asyncio -async def test_get_backup_flattened_error_async(): +async def test_fetch_backups_for_resource_type_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7386,75 +7401,301 @@ async def test_get_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_backup( - backupvault.GetBackupRequest(), - name="name_value", + await client.fetch_backups_for_resource_type( + backupvault.FetchBackupsForResourceTypeRequest(), + parent="parent_value", + resource_type="resource_type_value", ) -@pytest.mark.parametrize( - "request_type", - [ - backupvault.UpdateBackupRequest, - dict, - ], -) -def test_update_backup(request_type, transport: str = "grpc"): +def test_fetch_backups_for_resource_type_pager(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_backup(request) + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[], + next_page_token="def", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupvault.UpdateBackupRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.fetch_backups_for_resource_type( + request={}, retry=retry, timeout=timeout + ) - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) -def test_update_backup_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
+ +def test_fetch_backups_for_resource_type_pages(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupvault.UpdateBackupRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_backup(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.UpdateBackupRequest() - - -def test_update_backup_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[], + next_page_token="def", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_backups_for_resource_type(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_backups_for_resource_type_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[], + next_page_token="def", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_backups_for_resource_type( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupvault.Backup) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_backups_for_resource_type_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[], + next_page_token="def", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.FetchBackupsForResourceTypeResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_backups_for_resource_type(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupRequest, + dict, + ], +) +def test_get_backup(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + satisfies_pzs=True, + satisfies_pzi=True, + ) + response = client.get_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupvault.GetBackupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True + + +def test_get_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupvault.GetBackupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupvault.GetBackupRequest( + name="name_value", + ) + + +def test_get_backup_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) @@ -7463,26 +7704,21 @@ def test_update_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods + assert client._transport.get_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc request = {} - client.update_backup(request) + client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_backup(request) + client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7490,9 +7726,7 @@ def test_update_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_backup_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_get_backup_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -7507,7 +7741,7 @@ async def test_update_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_backup + client._client._transport.get_backup in client._client._transport._wrapped_methods ) @@ -7515,21 +7749,16 @@ async def test_update_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_backup + client._client._transport.get_backup ] = mock_rpc request = {} - await client.update_backup(request) + await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.update_backup(request) + await client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7537,8 +7766,8 @@ async def test_update_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_backup_async( - transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupRequest +async def test_get_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.GetBackupRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -7550,43 +7779,60 @@ async def test_update_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + satisfies_pzs=True, + satisfies_pzi=True, + ) ) - response = await client.update_backup(request) + response = await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupvault.UpdateBackupRequest() + request = backupvault.GetBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) - + assert isinstance(response, backupvault.Backup) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.etag == "etag_value" + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True + @pytest.mark.asyncio -async def test_update_backup_async_from_dict(): - await test_update_backup_async(request_type=dict) +async def test_get_backup_async_from_dict(): + await test_get_backup_async(request_type=dict) -def test_update_backup_field_headers(): +def test_get_backup_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupvault.UpdateBackupRequest() + request = backupvault.GetBackupRequest() - request.backup.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_backup(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backupvault.Backup() + client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7597,28 +7843,26 @@ def test_update_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_backup_field_headers_async(): +async def test_get_backup_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupvault.UpdateBackupRequest() + request = backupvault.GetBackupRequest() - request.backup.name = "name_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") - ) - await client.update_backup(request) + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) + await client.get_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7629,39 +7873,35 @@ async def test_update_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup.name=name_value", + "name=name_value", ) in kw["metadata"] -def test_update_backup_flattened(): +def test_get_backup_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupvault.Backup() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_backup( - backup=backupvault.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = backupvault.Backup(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_update_backup_flattened_error(): +def test_get_backup_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -7669,48 +7909,41 @@ def test_update_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup( - backupvault.UpdateBackupRequest(), - backup=backupvault.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_update_backup_flattened_async(): +async def test_get_backup_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupvault.Backup() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(backupvault.Backup()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_backup( - backup=backupvault.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + response = await client.get_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].backup - mock_val = backupvault.Backup(name="name_value") - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_backup_flattened_error_async(): +async def test_get_backup_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7718,21 +7951,20 @@ async def test_update_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_backup( - backupvault.UpdateBackupRequest(), - backup=backupvault.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + await client.get_backup( + backupvault.GetBackupRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - backupvault.DeleteBackupRequest, + backupvault.UpdateBackupRequest, dict, ], ) -def test_delete_backup(request_type, transport: str = "grpc"): +def test_update_backup(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7743,22 +7975,22 @@ def test_delete_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup(request) + response = client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupvault.DeleteBackupRequest() + request = backupvault.UpdateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_backup_non_empty_request_with_auto_populated_field(): +def test_update_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -7769,24 +8001,20 @@ def test_delete_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupvault.DeleteBackupRequest( - name="name_value", - ) + request = backupvault.UpdateBackupRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_backup(request=request) + client.update_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.DeleteBackupRequest( - name="name_value", - ) + assert args[0] == backupvault.UpdateBackupRequest() -def test_delete_backup_use_cached_wrapped_rpc(): +def test_update_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -7800,16 +8028,16 @@ def test_delete_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods + assert client._transport.update_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc request = {} - client.delete_backup(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -7819,7 +8047,7 @@ def test_delete_backup_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup(request) + client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7827,7 +8055,7 @@ def test_delete_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_backup_async_use_cached_wrapped_rpc( +async def test_update_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7844,7 +8072,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_backup + client._client._transport.update_backup in client._client._transport._wrapped_methods ) @@ -7852,11 +8080,11 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_backup + client._client._transport.update_backup ] = mock_rpc request = {} - await client.delete_backup(request) + await client.update_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -7866,7 +8094,7 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_backup(request) + await client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -7874,8 +8102,8 @@ async def test_delete_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_backup_async( - transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupRequest +async def test_update_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.UpdateBackupRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -7887,17 +8115,17 @@ async def test_delete_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_backup(request) + response = await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupvault.DeleteBackupRequest() + request = backupvault.UpdateBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -7905,25 +8133,25 @@ async def test_delete_backup_async( @pytest.mark.asyncio -async def test_delete_backup_async_from_dict(): - await test_delete_backup_async(request_type=dict) +async def test_update_backup_async_from_dict(): + await test_update_backup_async(request_type=dict) -def test_delete_backup_field_headers(): +def test_update_backup_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupvault.DeleteBackupRequest() + request = backupvault.UpdateBackupRequest() - request.name = "name_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -7934,28 +8162,28 @@ def test_delete_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_backup_field_headers_async(): +async def test_update_backup_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupvault.DeleteBackupRequest() + request = backupvault.UpdateBackupRequest() - request.name = "name_value" + request.backup.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_backup(request) + await client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -7966,35 +8194,39 @@ async def test_delete_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup.name=name_value", ) in kw["metadata"] -def test_delete_backup_flattened(): +def test_update_backup_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_backup( - name="name_value", + client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_delete_backup_flattened_error(): +def test_update_backup_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8002,20 +8234,21 @@ def test_delete_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup( - backupvault.DeleteBackupRequest(), - name="name_value", + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_delete_backup_flattened_async(): +async def test_update_backup_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -8024,21 +8257,25 @@ async def test_delete_backup_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_backup( - name="name_value", + response = await client.update_backup( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup + mock_val = backupvault.Backup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_delete_backup_flattened_error_async(): +async def test_update_backup_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8046,20 +8283,21 @@ async def test_delete_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_backup( - backupvault.DeleteBackupRequest(), - name="name_value", + await client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - backupvault.RestoreBackupRequest, + backupvault.DeleteBackupRequest, dict, ], ) -def test_restore_backup(request_type, transport: str = "grpc"): +def test_delete_backup(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8070,22 +8308,22 @@ def test_restore_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.restore_backup(request) + response = client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupvault.RestoreBackupRequest() + request = backupvault.DeleteBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_restore_backup_non_empty_request_with_auto_populated_field(): +def test_delete_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -8096,24 +8334,24 @@ def test_restore_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupvault.RestoreBackupRequest( + request = backupvault.DeleteBackupRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.restore_backup(request=request) + client.delete_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupvault.RestoreBackupRequest( + assert args[0] == backupvault.DeleteBackupRequest( name="name_value", ) -def test_restore_backup_use_cached_wrapped_rpc(): +def test_delete_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8127,16 +8365,16 @@ def test_restore_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_backup in client._transport._wrapped_methods + assert client._transport.delete_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc request = {} - client.restore_backup(request) + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -8146,7 +8384,7 @@ def test_restore_backup_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.restore_backup(request) + client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8154,7 +8392,7 @@ def test_restore_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_restore_backup_async_use_cached_wrapped_rpc( +async def test_delete_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8171,7 +8409,7 @@ async def test_restore_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.restore_backup + client._client._transport.delete_backup in client._client._transport._wrapped_methods ) @@ -8179,11 +8417,11 @@ async def test_restore_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.restore_backup + client._client._transport.delete_backup ] = mock_rpc request = {} - await client.restore_backup(request) + await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -8193,7 +8431,7 @@ async def test_restore_backup_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.restore_backup(request) + await client.delete_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8201,8 +8439,8 @@ async def test_restore_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_restore_backup_async( - transport: str = "grpc_asyncio", request_type=backupvault.RestoreBackupRequest +async def test_delete_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.DeleteBackupRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -8214,17 +8452,17 @@ async def test_restore_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.restore_backup(request) + response = await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupvault.RestoreBackupRequest() + request = backupvault.DeleteBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -8232,25 +8470,25 @@ async def test_restore_backup_async( @pytest.mark.asyncio -async def test_restore_backup_async_from_dict(): - await test_restore_backup_async(request_type=dict) +async def test_delete_backup_async_from_dict(): + await test_delete_backup_async(request_type=dict) -def test_restore_backup_field_headers(): +def test_delete_backup_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupvault.RestoreBackupRequest() + request = backupvault.DeleteBackupRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_backup(request) + client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8266,23 +8504,23 @@ def test_restore_backup_field_headers(): @pytest.mark.asyncio -async def test_restore_backup_field_headers_async(): +async def test_delete_backup_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupvault.RestoreBackupRequest() + request = backupvault.DeleteBackupRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.restore_backup(request) + await client.delete_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8297,18 +8535,18 @@ async def test_restore_backup_field_headers_async(): ) in kw["metadata"] -def test_restore_backup_flattened(): +def test_delete_backup_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.restore_backup( + client.delete_backup( name="name_value", ) @@ -8321,7 +8559,7 @@ def test_restore_backup_flattened(): assert arg == mock_val -def test_restore_backup_flattened_error(): +def test_delete_backup_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8329,20 +8567,20 @@ def test_restore_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.restore_backup( - backupvault.RestoreBackupRequest(), + client.delete_backup( + backupvault.DeleteBackupRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_restore_backup_flattened_async(): +async def test_delete_backup_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -8351,7 +8589,7 @@ async def test_restore_backup_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.restore_backup( + response = await client.delete_backup( name="name_value", ) @@ -8365,7 +8603,7 @@ async def test_restore_backup_flattened_async(): @pytest.mark.asyncio -async def test_restore_backup_flattened_error_async(): +async def test_delete_backup_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8373,8 +8611,8 @@ async def test_restore_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.restore_backup( - backupvault.RestoreBackupRequest(), + await client.delete_backup( + backupvault.DeleteBackupRequest(), name="name_value", ) @@ -8382,11 +8620,11 @@ async def test_restore_backup_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupplan.CreateBackupPlanRequest, + backupvault.RestoreBackupRequest, dict, ], ) -def test_create_backup_plan(request_type, transport: str = "grpc"): +def test_restore_backup(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8397,24 +8635,22 @@ def test_create_backup_plan(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup_plan(request) + response = client.restore_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.CreateBackupPlanRequest() + request = backupvault.RestoreBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_backup_plan_non_empty_request_with_auto_populated_field(): +def test_restore_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -8425,28 +8661,24 @@ def test_create_backup_plan_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplan.CreateBackupPlanRequest( - parent="parent_value", - backup_plan_id="backup_plan_id_value", + request = backupvault.RestoreBackupRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_backup_plan(request=request) + client.restore_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.CreateBackupPlanRequest( - parent="parent_value", - backup_plan_id="backup_plan_id_value", + assert args[0] == backupvault.RestoreBackupRequest( + name="name_value", ) -def test_create_backup_plan_use_cached_wrapped_rpc(): +def test_restore_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8460,20 +8692,16 @@ def test_create_backup_plan_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_backup_plan in client._transport._wrapped_methods - ) + assert client._transport.restore_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_backup_plan - ] = mock_rpc + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc request = {} - client.create_backup_plan(request) + client.restore_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -8483,7 +8711,7 @@ def test_create_backup_plan_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_backup_plan(request) + client.restore_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8491,7 +8719,7 @@ def test_create_backup_plan_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_backup_plan_async_use_cached_wrapped_rpc( +async def test_restore_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8508,7 +8736,7 @@ async def test_create_backup_plan_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_backup_plan + client._client._transport.restore_backup in client._client._transport._wrapped_methods ) @@ -8516,11 +8744,11 @@ async def test_create_backup_plan_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_backup_plan + client._client._transport.restore_backup ] = mock_rpc request = {} - await client.create_backup_plan(request) + await client.restore_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -8530,7 +8758,7 @@ async def test_create_backup_plan_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_backup_plan(request) + await client.restore_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8538,8 +8766,8 @@ async def test_create_backup_plan_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_backup_plan_async( - transport: str = "grpc_asyncio", request_type=backupplan.CreateBackupPlanRequest +async def test_restore_backup_async( + transport: str = "grpc_asyncio", request_type=backupvault.RestoreBackupRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -8551,19 +8779,17 @@ async def test_create_backup_plan_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_backup_plan(request) + response = await client.restore_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.CreateBackupPlanRequest() + request = backupvault.RestoreBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -8571,27 +8797,25 @@ async def test_create_backup_plan_async( @pytest.mark.asyncio -async def test_create_backup_plan_async_from_dict(): - await test_create_backup_plan_async(request_type=dict) +async def test_restore_backup_async_from_dict(): + await test_restore_backup_async(request_type=dict) -def test_create_backup_plan_field_headers(): +def test_restore_backup_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.CreateBackupPlanRequest() + request = backupvault.RestoreBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup_plan(request) + client.restore_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8602,30 +8826,28 @@ def test_create_backup_plan_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_backup_plan_field_headers_async(): +async def test_restore_backup_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplan.CreateBackupPlanRequest() + request = backupvault.RestoreBackupRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_backup_plan(request) + await client.restore_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8636,45 +8858,35 @@ async def test_create_backup_plan_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_backup_plan_flattened(): +def test_restore_backup_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_backup_plan( - parent="parent_value", - backup_plan=backupplan.BackupPlan(name="name_value"), - backup_plan_id="backup_plan_id_value", + client.restore_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_plan - mock_val = backupplan.BackupPlan(name="name_value") - assert arg == mock_val - arg = args[0].backup_plan_id - mock_val = "backup_plan_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_backup_plan_flattened_error(): +def test_restore_backup_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -8682,24 +8894,20 @@ def test_create_backup_plan_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup_plan( - backupplan.CreateBackupPlanRequest(), - parent="parent_value", - backup_plan=backupplan.BackupPlan(name="name_value"), - backup_plan_id="backup_plan_id_value", + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_backup_plan_flattened_async(): +async def test_restore_backup_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -8708,29 +8916,21 @@ async def test_create_backup_plan_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_backup_plan( - parent="parent_value", - backup_plan=backupplan.BackupPlan(name="name_value"), - backup_plan_id="backup_plan_id_value", + response = await client.restore_backup( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_plan - mock_val = backupplan.BackupPlan(name="name_value") - assert arg == mock_val - arg = args[0].backup_plan_id - mock_val = "backup_plan_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_backup_plan_flattened_error_async(): +async def test_restore_backup_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -8738,22 +8938,20 @@ async def test_create_backup_plan_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_backup_plan( - backupplan.CreateBackupPlanRequest(), - parent="parent_value", - backup_plan=backupplan.BackupPlan(name="name_value"), - backup_plan_id="backup_plan_id_value", + await client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - backupplan.UpdateBackupPlanRequest, + backupplan.CreateBackupPlanRequest, dict, ], ) -def test_update_backup_plan(request_type, transport: str = "grpc"): +def test_create_backup_plan(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8765,23 +8963,23 @@ def test_update_backup_plan(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan), "__call__" + type(client.transport.create_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_backup_plan(request) + response = client.create_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.UpdateBackupPlanRequest() + request = backupplan.CreateBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_backup_plan_non_empty_request_with_auto_populated_field(): +def test_create_backup_plan_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -8792,22 +8990,28 @@ def test_update_backup_plan_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = backupplan.UpdateBackupPlanRequest() + request = backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan), "__call__" + type(client.transport.create_backup_plan), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_backup_plan(request=request) + client.create_backup_plan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.UpdateBackupPlanRequest() + assert args[0] == backupplan.CreateBackupPlanRequest( + parent="parent_value", + backup_plan_id="backup_plan_id_value", + ) -def test_update_backup_plan_use_cached_wrapped_rpc(): +def test_create_backup_plan_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8822,7 +9026,7 @@ def test_update_backup_plan_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_backup_plan in client._transport._wrapped_methods + client._transport.create_backup_plan in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -8831,10 +9035,10 @@ def test_update_backup_plan_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_backup_plan + client._transport.create_backup_plan ] = mock_rpc request = {} - client.update_backup_plan(request) + client.create_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -8844,7 +9048,7 @@ def test_update_backup_plan_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup_plan(request) + client.create_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8852,7 +9056,7 @@ def test_update_backup_plan_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_backup_plan_async_use_cached_wrapped_rpc( +async def test_create_backup_plan_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8869,7 +9073,7 @@ async def test_update_backup_plan_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_backup_plan + client._client._transport.create_backup_plan in client._client._transport._wrapped_methods ) @@ -8877,11 +9081,11 @@ async def test_update_backup_plan_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_backup_plan + client._client._transport.create_backup_plan ] = mock_rpc request = {} - await client.update_backup_plan(request) + await client.create_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -8891,7 +9095,7 @@ async def test_update_backup_plan_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_backup_plan(request) + await client.create_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8899,8 +9103,8 @@ async def test_update_backup_plan_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_backup_plan_async( - transport: str = "grpc_asyncio", request_type=backupplan.UpdateBackupPlanRequest +async def test_create_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.CreateBackupPlanRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -8913,18 +9117,18 @@ async def test_update_backup_plan_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan), "__call__" + type(client.transport.create_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_backup_plan(request) + response = await client.create_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.UpdateBackupPlanRequest() + request = backupplan.CreateBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -8932,27 +9136,27 @@ async def test_update_backup_plan_async( @pytest.mark.asyncio -async def test_update_backup_plan_async_from_dict(): - await test_update_backup_plan_async(request_type=dict) +async def test_create_backup_plan_async_from_dict(): + await test_create_backup_plan_async(request_type=dict) -def test_update_backup_plan_field_headers(): +def test_create_backup_plan_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.UpdateBackupPlanRequest() + request = backupplan.CreateBackupPlanRequest() - request.backup_plan.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan), "__call__" + type(client.transport.create_backup_plan), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_backup_plan(request) + client.create_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -8963,30 +9167,30 @@ def test_update_backup_plan_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup_plan.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_backup_plan_field_headers_async(): +async def test_create_backup_plan_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplan.UpdateBackupPlanRequest() + request = backupplan.CreateBackupPlanRequest() - request.backup_plan.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan), "__call__" + type(client.transport.create_backup_plan), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_backup_plan(request) + await client.create_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8997,41 +9201,45 @@ async def test_update_backup_plan_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup_plan.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_backup_plan_flattened(): +def test_create_backup_plan_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan), "__call__" + type(client.transport.create_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_backup_plan( + client.create_backup_plan( + parent="parent_value", backup_plan=backupplan.BackupPlan(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_id="backup_plan_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup_plan mock_val = backupplan.BackupPlan(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" assert arg == mock_val -def test_update_backup_plan_flattened_error(): +def test_create_backup_plan_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9039,22 +9247,23 @@ def test_update_backup_plan_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup_plan( - backupplan.UpdateBackupPlanRequest(), + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", backup_plan=backupplan.BackupPlan(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_id="backup_plan_id_value", ) @pytest.mark.asyncio -async def test_update_backup_plan_flattened_async(): +async def test_create_backup_plan_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan), "__call__" + type(client.transport.create_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -9064,25 +9273,29 @@ async def test_update_backup_plan_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_backup_plan( + response = await client.create_backup_plan( + parent="parent_value", backup_plan=backupplan.BackupPlan(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_id="backup_plan_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup_plan mock_val = backupplan.BackupPlan(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_plan_id + mock_val = "backup_plan_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_backup_plan_flattened_error_async(): +async def test_create_backup_plan_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9090,21 +9303,22 @@ async def test_update_backup_plan_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_backup_plan( - backupplan.UpdateBackupPlanRequest(), + await client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", backup_plan=backupplan.BackupPlan(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_id="backup_plan_id_value", ) @pytest.mark.parametrize( "request_type", [ - backupplan.GetBackupPlanRequest, + backupplan.UpdateBackupPlanRequest, dict, ], ) -def test_get_backup_plan(request_type, transport: str = "grpc"): +def test_update_backup_plan(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9115,45 +9329,24 @@ def test_get_backup_plan(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.BackupPlan( - name="name_value", - description="description_value", - state=backupplan.BackupPlan.State.CREATING, - resource_type="resource_type_value", - etag="etag_value", - backup_vault="backup_vault_value", - backup_vault_service_account="backup_vault_service_account_value", - log_retention_days=1929, - supported_resource_types=["supported_resource_types_value"], - revision_id="revision_id_value", - revision_name="revision_name_value", - ) - response = client.get_backup_plan(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, backupplan.BackupPlan) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == backupplan.BackupPlan.State.CREATING - assert response.resource_type == "resource_type_value" - assert response.etag == "etag_value" - assert response.backup_vault == "backup_vault_value" - assert response.backup_vault_service_account == "backup_vault_service_account_value" - assert response.log_retention_days == 1929 - assert response.supported_resource_types == ["supported_resource_types_value"] - assert response.revision_id == "revision_id_value" - assert response.revision_name == "revision_name_value" + assert isinstance(response, future.Future) -def test_get_backup_plan_non_empty_request_with_auto_populated_field(): +def test_update_backup_plan_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -9164,24 +9357,22 @@ def test_get_backup_plan_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplan.GetBackupPlanRequest( - name="name_value", - ) + request = backupplan.UpdateBackupPlanRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_backup_plan(request=request) + client.update_backup_plan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.GetBackupPlanRequest( - name="name_value", - ) + assert args[0] == backupplan.UpdateBackupPlanRequest() -def test_get_backup_plan_use_cached_wrapped_rpc(): +def test_update_backup_plan_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9195,21 +9386,30 @@ def test_get_backup_plan_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup_plan in client._transport._wrapped_methods + assert ( + client._transport.update_backup_plan in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_backup_plan + ] = mock_rpc request = {} - client.get_backup_plan(request) + client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup_plan(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9217,7 +9417,7 @@ def test_get_backup_plan_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_plan_async_use_cached_wrapped_rpc( +async def test_update_backup_plan_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9234,7 +9434,7 @@ async def test_get_backup_plan_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_backup_plan + client._client._transport.update_backup_plan in client._client._transport._wrapped_methods ) @@ -9242,16 +9442,21 @@ async def test_get_backup_plan_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup_plan + client._client._transport.update_backup_plan ] = mock_rpc request = {} - await client.get_backup_plan(request) + await client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_backup_plan(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9259,8 +9464,8 @@ async def test_get_backup_plan_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_backup_plan_async( - transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest +async def test_update_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.UpdateBackupPlanRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -9272,69 +9477,50 @@ async def test_get_backup_plan_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlan( - name="name_value", - description="description_value", - state=backupplan.BackupPlan.State.CREATING, - resource_type="resource_type_value", - etag="etag_value", - backup_vault="backup_vault_value", - backup_vault_service_account="backup_vault_service_account_value", - log_retention_days=1929, - supported_resource_types=["supported_resource_types_value"], - revision_id="revision_id_value", - revision_name="revision_name_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_backup_plan(request) + response = await client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, backupplan.BackupPlan) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == backupplan.BackupPlan.State.CREATING - assert response.resource_type == "resource_type_value" - assert response.etag == "etag_value" - assert response.backup_vault == "backup_vault_value" - assert response.backup_vault_service_account == "backup_vault_service_account_value" - assert response.log_retention_days == 1929 - assert response.supported_resource_types == ["supported_resource_types_value"] - assert response.revision_id == "revision_id_value" - assert response.revision_name == "revision_name_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_backup_plan_async_from_dict(): - await test_get_backup_plan_async(request_type=dict) +async def test_update_backup_plan_async_from_dict(): + await test_update_backup_plan_async(request_type=dict) -def test_get_backup_plan_field_headers(): +def test_update_backup_plan_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() - request.name = "name_value" + request.backup_plan.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: - call.return_value = backupplan.BackupPlan() - client.get_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -9342,28 +9528,30 @@ def test_get_backup_plan_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_backup_plan_field_headers_async(): +async def test_update_backup_plan_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() - request.name = "name_value" + request.backup_plan.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlan() + operations_pb2.Operation(name="operations/op") ) - await client.get_backup_plan(request) + await client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -9374,35 +9562,41 @@ async def test_get_backup_plan_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan.name=name_value", ) in kw["metadata"] -def test_get_backup_plan_flattened(): +def test_update_backup_plan_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.BackupPlan() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_backup_plan( - name="name_value", + client.update_backup_plan( + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_backup_plan_flattened_error(): +def test_update_backup_plan_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9410,43 +9604,50 @@ def test_get_backup_plan_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup_plan( - backupplan.GetBackupPlanRequest(), - name="name_value", + client.update_backup_plan( + backupplan.UpdateBackupPlanRequest(), + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_backup_plan_flattened_async(): +async def test_update_backup_plan_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.BackupPlan() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlan() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_backup_plan( - name="name_value", + response = await client.update_backup_plan( + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_backup_plan_flattened_error_async(): +async def test_update_backup_plan_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9454,20 +9655,21 @@ async def test_get_backup_plan_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_backup_plan( - backupplan.GetBackupPlanRequest(), - name="name_value", + await client.update_backup_plan( + backupplan.UpdateBackupPlanRequest(), + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - backupplan.ListBackupPlansRequest, + backupplan.GetBackupPlanRequest, dict, ], ) -def test_list_backup_plans(request_type, transport: str = "grpc"): +def test_get_backup_plan(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9478,29 +9680,45 @@ def test_list_backup_plans(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlansResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + log_retention_days=1929, + supported_resource_types=["supported_resource_types_value"], + revision_id="revision_id_value", + revision_name="revision_name_value", ) - response = client.list_backup_plans(request) + response = client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPlansPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + assert response.log_retention_days == 1929 + assert response.supported_resource_types == ["supported_resource_types_value"] + assert response.revision_id == "revision_id_value" + assert response.revision_name == "revision_name_value" -def test_list_backup_plans_non_empty_request_with_auto_populated_field(): +def test_get_backup_plan_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -9511,32 +9729,24 @@ def test_list_backup_plans_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplan.ListBackupPlansRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = backupplan.GetBackupPlanRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_backup_plans(request=request) + client.get_backup_plan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.ListBackupPlansRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == backupplan.GetBackupPlanRequest( + name="name_value", ) -def test_list_backup_plans_use_cached_wrapped_rpc(): +def test_get_backup_plan_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9550,23 +9760,21 @@ def test_list_backup_plans_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backup_plans in client._transport._wrapped_methods + assert client._transport.get_backup_plan in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_backup_plans - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc request = {} - client.list_backup_plans(request) + client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_plans(request) + client.get_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9574,7 +9782,7 @@ def test_list_backup_plans_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_backup_plans_async_use_cached_wrapped_rpc( +async def test_get_backup_plan_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9591,7 +9799,7 @@ async def test_list_backup_plans_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_backup_plans + client._client._transport.get_backup_plan in client._client._transport._wrapped_methods ) @@ -9599,16 +9807,16 @@ async def test_list_backup_plans_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_backup_plans + client._client._transport.get_backup_plan ] = mock_rpc request = {} - await client.list_backup_plans(request) + await client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_backup_plans(request) + await client.get_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9616,8 +9824,8 @@ async def test_list_backup_plans_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_backup_plans_async( - transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest +async def test_get_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -9629,52 +9837,66 @@ async def test_list_backup_plans_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlansResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + log_retention_days=1929, + supported_resource_types=["supported_resource_types_value"], + revision_id="revision_id_value", + revision_name="revision_name_value", ) ) - response = await client.list_backup_plans(request) + response = await client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPlansAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + assert response.log_retention_days == 1929 + assert response.supported_resource_types == ["supported_resource_types_value"] + assert response.revision_id == "revision_id_value" + assert response.revision_name == "revision_name_value" @pytest.mark.asyncio -async def test_list_backup_plans_async_from_dict(): - await test_list_backup_plans_async(request_type=dict) +async def test_get_backup_plan_async_from_dict(): + await test_get_backup_plan_async(request_type=dict) -def test_list_backup_plans_field_headers(): +def test_get_backup_plan_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: - call.return_value = backupplan.ListBackupPlansResponse() - client.list_backup_plans(request) + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = backupplan.BackupPlan() + client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9685,30 +9907,28 @@ def test_list_backup_plans_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_backup_plans_field_headers_async(): +async def test_get_backup_plan_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlansResponse() + backupplan.BackupPlan() ) - await client.list_backup_plans(request) + await client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -9719,37 +9939,35 @@ async def test_list_backup_plans_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_backup_plans_flattened(): +def test_get_backup_plan_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlansResponse() + call.return_value = backupplan.BackupPlan() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_backup_plans( - parent="parent_value", + client.get_backup_plan( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_backup_plans_flattened_error(): +def test_get_backup_plan_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9757,45 +9975,43 @@ def test_list_backup_plans_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_plans( - backupplan.ListBackupPlansRequest(), - parent="parent_value", + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_backup_plans_flattened_async(): +async def test_get_backup_plan_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlansResponse() + call.return_value = backupplan.BackupPlan() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlansResponse() + backupplan.BackupPlan() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_backup_plans( - parent="parent_value", + response = await client.get_backup_plan( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_backup_plans_flattened_error_async(): +async def test_get_backup_plan_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9803,325 +10019,127 @@ async def test_list_backup_plans_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
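# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the generated diff; same
# assumptions as the sketch above. The *_field_headers tests pin down that any
# request field appearing in the HTTP/1.1 URI (here `name`) is also forwarded
# as routing metadata in the x-goog-request-params header, which is what the
# backend uses to route the call.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import backupdr_v1
from google.cloud.backupdr_v1.types import backupplan

client = backupdr_v1.BackupDRClient(
    credentials=ga_credentials.AnonymousCredentials()
)
with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call:
    call.return_value = backupplan.BackupPlan()
    client.get_backup_plan(
        request=backupplan.GetBackupPlanRequest(name="name_value")
    )
    _, _, kw = call.mock_calls[0]
    # The resource name is mirrored into the routing header.
    assert ("x-goog-request-params", "name=name_value") in kw["metadata"]
# ---------------------------------------------------------------------------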
with pytest.raises(ValueError): - await client.list_backup_plans( - backupplan.ListBackupPlansRequest(), - parent="parent_value", + await client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", ) -def test_list_backup_plans_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_backup_plans), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. + call.return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + response = client.list_backup_plans(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupplan.BackupPlan) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_backup_plans_pages(transport_name: str = "grpc"): +def test_list_backup_plans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.list_backup_plans), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - pages = list(client.list_backup_plans(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_backup_plans_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, +def test_list_backup_plans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - async_pager = await client.list_backup_plans( - request={}, + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc + request = {} + client.list_backup_plans(request) - assert len(responses) == 6 - assert all(isinstance(i, backupplan.BackupPlan) for i in responses) + # Establish that the underlying gRPC stub method was called. 
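# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the generated diff; same
# assumptions as above. The *_use_cached_wrapped_rpc tests verify that retry/
# timeout wrapping happens once, in _prep_wrapped_messages at client
# construction, instead of on every call. A condensed form of that invariant,
# using the same private attributes the generated tests themselves rely on:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import backupdr_v1

with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
    client = backupdr_v1.BackupDRClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
    )
    # Every RPC was wrapped while the transport was built ...
    assert wrapper_fn.call_count > 0
    # ... and the wrapped callables are cached on the transport for reuse.
    assert client._transport.list_backup_plans in client._transport._wrapped_methods
# ---------------------------------------------------------------------------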
+ assert mock_rpc.call_count == 1 + client.list_backup_plans(request) -@pytest.mark.asyncio -async def test_list_backup_plans_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_plans(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - backupplan.DeleteBackupPlanRequest, - dict, - ], -) -def test_delete_backup_plan(request_type, transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplan.DeleteBackupPlanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplan.DeleteBackupPlanRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_backup_plan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.DeleteBackupPlanRequest( - name="name_value", - ) - - -def test_delete_backup_plan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.delete_backup_plan in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.delete_backup_plan - ] = mock_rpc - request = {} - client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_plan(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio -async def test_delete_backup_plan_async_use_cached_wrapped_rpc( +async def test_list_backup_plans_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10138,7 +10156,7 @@ async def test_delete_backup_plan_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_backup_plan + client._client._transport.list_backup_plans in client._client._transport._wrapped_methods ) @@ -10146,21 +10164,16 @@ async def test_delete_backup_plan_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_backup_plan + client._client._transport.list_backup_plans ] = mock_rpc request = {} - await client.delete_backup_plan(request) + await client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_backup_plan(request) + await client.list_backup_plans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10168,8 +10181,8 @@ async def test_delete_backup_plan_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_backup_plan_async( - transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest +async def test_list_backup_plans_async( + transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -10182,46 +10195,51 @@ async def test_delete_backup_plan_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.delete_backup_plan(request) + response = await client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.DeleteBackupPlanRequest() + request = backupplan.ListBackupPlansRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListBackupPlansAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_delete_backup_plan_async_from_dict(): - await test_delete_backup_plan_async(request_type=dict) +async def test_list_backup_plans_async_from_dict(): + await test_list_backup_plans_async(request_type=dict) -def test_delete_backup_plan_field_headers(): +def test_list_backup_plans_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.DeleteBackupPlanRequest() + request = backupplan.ListBackupPlansRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup_plan(request) + call.return_value = backupplan.ListBackupPlansResponse() + client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10232,30 +10250,30 @@ def test_delete_backup_plan_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_backup_plan_field_headers_async(): +async def test_list_backup_plans_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.DeleteBackupPlanRequest() + request = backupplan.ListBackupPlansRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + backupplan.ListBackupPlansResponse() ) - await client.delete_backup_plan(request) + await client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10266,37 +10284,37 @@ async def test_delete_backup_plan_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_backup_plan_flattened(): +def test_list_backup_plans_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupplan.ListBackupPlansResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_backup_plan( - name="name_value", + client.list_backup_plans( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_backup_plan_flattened_error(): +def test_list_backup_plans_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10304,45 +10322,45 @@ def test_delete_backup_plan_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup_plan( - backupplan.DeleteBackupPlanRequest(), - name="name_value", + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_backup_plan_flattened_async(): +async def test_list_backup_plans_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupplan.ListBackupPlansResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupplan.ListBackupPlansResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_backup_plan( - name="name_value", + response = await client.list_backup_plans( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_backup_plan_flattened_error_async(): +async def test_list_backup_plans_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10350,20 +10368,222 @@ async def test_delete_backup_plan_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_backup_plan( - backupplan.DeleteBackupPlanRequest(), - name="name_value", + await client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + +def test_list_backup_plans_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
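# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the generated diff; same
# assumptions as above. list_backup_plans returns a pager that re-issues the
# RPC with each response's next_page_token, so iterating it walks every page
# transparently. The pager tests above feed a canned series of pages through
# side_effect; a caller just iterates:
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import backupdr_v1
from google.cloud.backupdr_v1.types import backupplan

client = backupdr_v1.BackupDRClient(
    credentials=ga_credentials.AnonymousCredentials()
)
with mock.patch.object(
    type(client.transport.list_backup_plans), "__call__"
) as call:
    call.side_effect = (
        backupplan.ListBackupPlansResponse(
            backup_plans=[backupplan.BackupPlan()],
            next_page_token="abc",
        ),
        backupplan.ListBackupPlansResponse(
            backup_plans=[backupplan.BackupPlan()],
        ),
    )
    plans = list(client.list_backup_plans(request={}))
    # Two items across two pages; the second page was fetched automatically.
    assert len(plans) == 2
    assert call.call_count == 2
# ---------------------------------------------------------------------------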
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plans(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plans( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plans(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - backupplan.GetBackupPlanRevisionRequest, + backupplan.DeleteBackupPlanRequest, dict, ], ) -def test_get_backup_plan_revision(request_type, transport: str = "grpc"): +def test_delete_backup_plan(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10375,30 +10595,23 @@ def test_get_backup_plan_revision(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_revision), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.BackupPlanRevision( - name="name_value", - revision_id="revision_id_value", - state=backupplan.BackupPlanRevision.State.CREATING, - ) - response = client.get_backup_plan_revision(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.GetBackupPlanRevisionRequest() + request = backupplan.DeleteBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, backupplan.BackupPlanRevision) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.state == backupplan.BackupPlanRevision.State.CREATING + assert isinstance(response, future.Future) -def test_get_backup_plan_revision_non_empty_request_with_auto_populated_field(): +def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -10409,26 +10622,26 @@ def test_get_backup_plan_revision_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplan.GetBackupPlanRevisionRequest( + request = backupplan.DeleteBackupPlanRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_backup_plan_revision), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_backup_plan_revision(request=request) + client.delete_backup_plan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.GetBackupPlanRevisionRequest( + assert args[0] == backupplan.DeleteBackupPlanRequest( name="name_value", ) -def test_get_backup_plan_revision_use_cached_wrapped_rpc(): +def test_delete_backup_plan_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10443,8 +10656,7 @@ def test_get_backup_plan_revision_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_backup_plan_revision - in client._transport._wrapped_methods + client._transport.delete_backup_plan in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -10453,15 +10665,20 @@ def test_get_backup_plan_revision_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_backup_plan_revision + client._transport.delete_backup_plan ] = mock_rpc request = {} - client.get_backup_plan_revision(request) + client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup_plan_revision(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10469,7 +10686,7 @@ def test_get_backup_plan_revision_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_plan_revision_async_use_cached_wrapped_rpc( +async def test_delete_backup_plan_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10486,7 +10703,7 @@ async def test_get_backup_plan_revision_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_backup_plan_revision + client._client._transport.delete_backup_plan in client._client._transport._wrapped_methods ) @@ -10494,16 +10711,21 @@ async def test_get_backup_plan_revision_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup_plan_revision + client._client._transport.delete_backup_plan ] = mock_rpc request = {} - await client.get_backup_plan_revision(request) + await client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_backup_plan_revision(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10511,9 +10733,8 @@ async def test_get_backup_plan_revision_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_backup_plan_revision_async( - transport: str = "grpc_asyncio", - request_type=backupplan.GetBackupPlanRevisionRequest, +async def test_delete_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -10526,53 +10747,46 @@ async def test_get_backup_plan_revision_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_revision), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlanRevision( - name="name_value", - revision_id="revision_id_value", - state=backupplan.BackupPlanRevision.State.CREATING, - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_backup_plan_revision(request) + response = await client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.GetBackupPlanRevisionRequest() + request = backupplan.DeleteBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, backupplan.BackupPlanRevision) - assert response.name == "name_value" - assert response.revision_id == "revision_id_value" - assert response.state == backupplan.BackupPlanRevision.State.CREATING + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_backup_plan_revision_async_from_dict(): - await test_get_backup_plan_revision_async(request_type=dict) +async def test_delete_backup_plan_async_from_dict(): + await test_delete_backup_plan_async(request_type=dict) -def test_get_backup_plan_revision_field_headers(): +def test_delete_backup_plan_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.GetBackupPlanRevisionRequest() + request = backupplan.DeleteBackupPlanRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_revision), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: - call.return_value = backupplan.BackupPlanRevision() - client.get_backup_plan_revision(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10588,25 +10802,25 @@ def test_get_backup_plan_revision_field_headers(): @pytest.mark.asyncio -async def test_get_backup_plan_revision_field_headers_async(): +async def test_delete_backup_plan_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = backupplan.GetBackupPlanRevisionRequest() + request = backupplan.DeleteBackupPlanRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_revision), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlanRevision() + operations_pb2.Operation(name="operations/op") ) - await client.get_backup_plan_revision(request) + await client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10621,20 +10835,20 @@ async def test_get_backup_plan_revision_field_headers_async(): ) in kw["metadata"] -def test_get_backup_plan_revision_flattened(): +def test_delete_backup_plan_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_revision), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.BackupPlanRevision() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_backup_plan_revision( + client.delete_backup_plan( name="name_value", ) @@ -10647,7 +10861,7 @@ def test_get_backup_plan_revision_flattened(): assert arg == mock_val -def test_get_backup_plan_revision_flattened_error(): +def test_delete_backup_plan_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10655,31 +10869,31 @@ def test_get_backup_plan_revision_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup_plan_revision( - backupplan.GetBackupPlanRevisionRequest(), + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_backup_plan_revision_flattened_async(): +async def test_delete_backup_plan_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_revision), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.BackupPlanRevision() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlanRevision() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
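# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch, not part of the generated diff; same
# assumptions as above. Unlike get/list, delete_backup_plan is a long-running
# operation: the stub returns a google.longrunning Operation and the client
# wraps it in a future. Against a live service a caller would block on
# lro.result(); here the stub is mocked, so the sketch only checks the type,
# exactly as the tests above do.
from unittest import mock

from google.api_core import future
from google.auth import credentials as ga_credentials
from google.cloud import backupdr_v1
from google.longrunning import operations_pb2

client = backupdr_v1.BackupDRClient(
    credentials=ga_credentials.AnonymousCredentials()
)
with mock.patch.object(
    type(client.transport.delete_backup_plan), "__call__"
) as call:
    call.return_value = operations_pb2.Operation(name="operations/spam")
    lro = client.delete_backup_plan(name="name_value")
    assert isinstance(lro, future.Future)
# ---------------------------------------------------------------------------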
- response = await client.get_backup_plan_revision( + response = await client.delete_backup_plan( name="name_value", ) @@ -10693,7 +10907,7 @@ async def test_get_backup_plan_revision_flattened_async(): @pytest.mark.asyncio -async def test_get_backup_plan_revision_flattened_error_async(): +async def test_delete_backup_plan_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10701,8 +10915,8 @@ async def test_get_backup_plan_revision_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_backup_plan_revision( - backupplan.GetBackupPlanRevisionRequest(), + await client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), name="name_value", ) @@ -10710,11 +10924,11 @@ async def test_get_backup_plan_revision_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupplan.ListBackupPlanRevisionsRequest, + backupplan.GetBackupPlanRevisionRequest, dict, ], ) -def test_list_backup_plan_revisions(request_type, transport: str = "grpc"): +def test_get_backup_plan_revision(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10726,28 +10940,30 @@ def test_list_backup_plan_revisions(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlanRevisionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = backupplan.BackupPlanRevision( + name="name_value", + revision_id="revision_id_value", + state=backupplan.BackupPlanRevision.State.CREATING, ) - response = client.list_backup_plan_revisions(request) + response = client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.ListBackupPlanRevisionsRequest() + request = backupplan.GetBackupPlanRevisionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlanRevisionsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplan.BackupPlanRevision) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.state == backupplan.BackupPlanRevision.State.CREATING -def test_list_backup_plan_revisions_non_empty_request_with_auto_populated_field(): +def test_get_backup_plan_revision_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -10758,28 +10974,26 @@ def test_list_backup_plan_revisions_non_empty_request_with_auto_populated_field( # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = backupplan.ListBackupPlanRevisionsRequest( - parent="parent_value", - page_token="page_token_value", + request = backupplan.GetBackupPlanRevisionRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_backup_plan_revisions(request=request) + client.get_backup_plan_revision(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.ListBackupPlanRevisionsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == backupplan.GetBackupPlanRevisionRequest( + name="name_value", ) -def test_list_backup_plan_revisions_use_cached_wrapped_rpc(): +def test_get_backup_plan_revision_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10794,7 +11008,7 @@ def test_list_backup_plan_revisions_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_backup_plan_revisions + client._transport.get_backup_plan_revision in client._transport._wrapped_methods ) @@ -10804,15 +11018,15 @@ def test_list_backup_plan_revisions_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_plan_revisions + client._transport.get_backup_plan_revision ] = mock_rpc request = {} - client.list_backup_plan_revisions(request) + client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_backup_plan_revisions(request) + client.get_backup_plan_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10820,7 +11034,7 @@ def test_list_backup_plan_revisions_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_backup_plan_revisions_async_use_cached_wrapped_rpc( +async def test_get_backup_plan_revision_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10837,7 +11051,7 @@ async def test_list_backup_plan_revisions_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_backup_plan_revisions + client._client._transport.get_backup_plan_revision in client._client._transport._wrapped_methods ) @@ -10845,16 +11059,16 @@ async def test_list_backup_plan_revisions_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_backup_plan_revisions + client._client._transport.get_backup_plan_revision ] = mock_rpc request = {} - await client.list_backup_plan_revisions(request) + await client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_backup_plan_revisions(request) + await client.get_backup_plan_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10862,9 +11076,9 @@ async def test_list_backup_plan_revisions_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_backup_plan_revisions_async( +async def test_get_backup_plan_revision_async( transport: str = "grpc_asyncio", - request_type=backupplan.ListBackupPlanRevisionsRequest, + request_type=backupplan.GetBackupPlanRevisionRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -10877,51 +11091,53 @@ async def test_list_backup_plan_revisions_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlanRevisionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + backupplan.BackupPlanRevision( + name="name_value", + revision_id="revision_id_value", + state=backupplan.BackupPlanRevision.State.CREATING, ) ) - response = await client.list_backup_plan_revisions(request) + response = await client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.ListBackupPlanRevisionsRequest() + request = backupplan.GetBackupPlanRevisionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlanRevisionsAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplan.BackupPlanRevision) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.state == backupplan.BackupPlanRevision.State.CREATING @pytest.mark.asyncio -async def test_list_backup_plan_revisions_async_from_dict(): - await test_list_backup_plan_revisions_async(request_type=dict) +async def test_get_backup_plan_revision_async_from_dict(): + await test_get_backup_plan_revision_async(request_type=dict) -def test_list_backup_plan_revisions_field_headers(): +def test_get_backup_plan_revision_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.ListBackupPlanRevisionsRequest() + request = backupplan.GetBackupPlanRevisionRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: - call.return_value = backupplan.ListBackupPlanRevisionsResponse() - client.list_backup_plan_revisions(request) + call.return_value = backupplan.BackupPlanRevision() + client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10932,30 +11148,30 @@ def test_list_backup_plan_revisions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_backup_plan_revisions_field_headers_async(): +async def test_get_backup_plan_revision_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.ListBackupPlanRevisionsRequest() + request = backupplan.GetBackupPlanRevisionRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlanRevisionsResponse() + backupplan.BackupPlanRevision() ) - await client.list_backup_plan_revisions(request) + await client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10966,37 +11182,37 @@ async def test_list_backup_plan_revisions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_backup_plan_revisions_flattened(): +def test_get_backup_plan_revision_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlanRevisionsResponse() + call.return_value = backupplan.BackupPlanRevision() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_backup_plan_revisions( - parent="parent_value", + client.get_backup_plan_revision( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_backup_plan_revisions_flattened_error(): +def test_get_backup_plan_revision_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11004,45 +11220,45 @@ def test_list_backup_plan_revisions_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_plan_revisions( - backupplan.ListBackupPlanRevisionsRequest(), - parent="parent_value", + client.get_backup_plan_revision( + backupplan.GetBackupPlanRevisionRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_backup_plan_revisions_flattened_async(): +async def test_get_backup_plan_revision_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlanRevisionsResponse() + call.return_value = backupplan.BackupPlanRevision() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlanRevisionsResponse() + backupplan.BackupPlanRevision() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_backup_plan_revisions( - parent="parent_value", + response = await client.get_backup_plan_revision( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_backup_plan_revisions_flattened_error_async(): +async def test_get_backup_plan_revision_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11050,224 +11266,20 @@ async def test_list_backup_plan_revisions_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_backup_plan_revisions( - backupplan.ListBackupPlanRevisionsRequest(), - parent="parent_value", + await client.get_backup_plan_revision( + backupplan.GetBackupPlanRevisionRequest(), + name="name_value", ) -def test_list_backup_plan_revisions_pager(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[], - next_page_token="def", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_backup_plan_revisions( - request={}, retry=retry, timeout=timeout - ) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupplan.BackupPlanRevision) for i in results) - - -def test_list_backup_plan_revisions_pages(transport_name: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backup_plan_revisions), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[], - next_page_token="def", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - ), - RuntimeError, - ) - pages = list(client.list_backup_plan_revisions(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_backup_plan_revisions_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_revisions), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[], - next_page_token="def", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_plan_revisions( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupplan.BackupPlanRevision) for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_plan_revisions_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_revisions), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[], - next_page_token="def", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlanRevisionsResponse( - backup_plan_revisions=[ - backupplan.BackupPlanRevision(), - backupplan.BackupPlanRevision(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_plan_revisions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - @pytest.mark.parametrize( "request_type", [ - backupplanassociation.CreateBackupPlanAssociationRequest, + backupplan.ListBackupPlanRevisionsRequest, dict, ], ) -def test_create_backup_plan_association(request_type, transport: str = "grpc"): +def test_list_backup_plan_revisions(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11279,23 +11291,28 @@ def test_create_backup_plan_association(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup_plan_association(request) + call.return_value = backupplan.ListBackupPlanRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.ListBackupPlanRevisionsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListBackupPlanRevisionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): +def test_list_backup_plan_revisions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -11306,28 +11323,28 @@ def test_create_backup_plan_association_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = backupplanassociation.CreateBackupPlanAssociationRequest( + request = backupplan.ListBackupPlanRevisionsRequest( parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_backup_plan_association(request=request) + client.list_backup_plan_revisions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( + assert args[0] == backupplan.ListBackupPlanRevisionsRequest( parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", + page_token="page_token_value", ) -def test_create_backup_plan_association_use_cached_wrapped_rpc(): +def test_list_backup_plan_revisions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11342,7 +11359,7 @@ def test_create_backup_plan_association_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_backup_plan_association + client._transport.list_backup_plan_revisions in client._transport._wrapped_methods ) @@ -11352,20 +11369,15 @@ def test_create_backup_plan_association_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_backup_plan_association + client._transport.list_backup_plan_revisions ] = mock_rpc request = {} - client.create_backup_plan_association(request) + client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_plan_association(request) + client.list_backup_plan_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11373,7 +11385,7 @@ def test_create_backup_plan_association_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( +async def test_list_backup_plan_revisions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11390,7 +11402,7 @@ async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_backup_plan_association + client._client._transport.list_backup_plan_revisions in client._client._transport._wrapped_methods ) @@ -11398,21 +11410,16 @@ async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_backup_plan_association + client._client._transport.list_backup_plan_revisions ] = mock_rpc request = {} - await client.create_backup_plan_association(request) + await client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_backup_plan_association(request) + await client.list_backup_plan_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11420,9 +11427,9 @@ async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_backup_plan_association_async( +async def test_list_backup_plan_revisions_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.CreateBackupPlanAssociationRequest, + request_type=backupplan.ListBackupPlanRevisionsRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -11435,46 +11442,51 @@ async def test_create_backup_plan_association_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupplan.ListBackupPlanRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.create_backup_plan_association(request) + response = await client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.ListBackupPlanRevisionsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListBackupPlanRevisionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_create_backup_plan_association_async_from_dict(): - await test_create_backup_plan_association_async(request_type=dict) +async def test_list_backup_plan_revisions_async_from_dict(): + await test_list_backup_plan_revisions_async(request_type=dict) -def test_create_backup_plan_association_field_headers(): +def test_list_backup_plan_revisions_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.ListBackupPlanRevisionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup_plan_association(request) + call.return_value = backupplan.ListBackupPlanRevisionsResponse() + client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11490,25 +11502,25 @@ def test_create_backup_plan_association_field_headers(): @pytest.mark.asyncio -async def test_create_backup_plan_association_field_headers_async(): +async def test_list_backup_plan_revisions_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.ListBackupPlanRevisionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + backupplan.ListBackupPlanRevisionsResponse() ) - await client.create_backup_plan_association(request) + await client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11523,25 +11535,21 @@ async def test_create_backup_plan_association_field_headers_async(): ) in kw["metadata"] -def test_create_backup_plan_association_flattened(): +def test_list_backup_plan_revisions_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupplan.ListBackupPlanRevisionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_backup_plan_association( + client.list_backup_plan_revisions( parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", ) # Establish that the underlying call was made with the expected @@ -11551,15 +11559,9 @@ def test_create_backup_plan_association_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].backup_plan_association - mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") - assert arg == mock_val - arg = args[0].backup_plan_association_id - mock_val = "backup_plan_association_id_value" - assert arg == mock_val -def test_create_backup_plan_association_flattened_error(): +def test_list_backup_plan_revisions_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11567,40 +11569,32 @@ def test_create_backup_plan_association_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), + client.list_backup_plan_revisions( + backupplan.ListBackupPlanRevisionsRequest(), parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", ) @pytest.mark.asyncio -async def test_create_backup_plan_association_flattened_async(): +async def test_list_backup_plan_revisions_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupplan.ListBackupPlanRevisionsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupplan.ListBackupPlanRevisionsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_backup_plan_association( + response = await client.list_backup_plan_revisions( parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", ) # Establish that the underlying call was made with the expected @@ -11610,16 +11604,10 @@ async def test_create_backup_plan_association_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].backup_plan_association - mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") - assert arg == mock_val - arg = args[0].backup_plan_association_id - mock_val = "backup_plan_association_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_create_backup_plan_association_flattened_error_async(): +async def test_list_backup_plan_revisions_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11627,106 +11615,312 @@ async def test_create_backup_plan_association_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), + await client.list_backup_plan_revisions( + backupplan.ListBackupPlanRevisionsRequest(), parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", ) -@pytest.mark.parametrize( - "request_type", - [ - backupplanassociation.UpdateBackupPlanAssociationRequest, - dict, - ], -) -def test_update_backup_plan_association(request_type, transport: str = "grpc"): +def test_list_backup_plan_revisions_pager(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_backup_plan_association(request) + # Set the response to a series of pages. + call.side_effect = ( + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], + next_page_token="def", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + ), + RuntimeError, + ) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplanassociation.UpdateBackupPlanAssociationRequest() - assert args[0] == request + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plan_revisions( + request={}, retry=retry, timeout=timeout + ) - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlanRevision) for i in results) -def test_update_backup_plan_association_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_list_backup_plan_revisions_pages(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport_name, ) - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplanassociation.UpdateBackupPlanAssociationRequest() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan_association), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_backup_plan_association(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.UpdateBackupPlanAssociationRequest() - - -def test_update_backup_plan_association_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], + next_page_token="def", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + ), + RuntimeError, ) + pages = list(client.list_backup_plan_revisions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.update_backup_plan_association - in client._transport._wrapped_methods - ) +@pytest.mark.asyncio +async def test_list_backup_plan_revisions_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], + next_page_token="def", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plan_revisions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupplan.BackupPlanRevision) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_plan_revisions_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], + next_page_token="def", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plan_revisions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.CreateBackupPlanAssociationRequest, + dict, + ], +) +def test_create_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.CreateBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_create_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client._transport._wrapped_methods[ - client._transport.update_backup_plan_association + client._transport.create_backup_plan_association ] = mock_rpc request = {} - client.update_backup_plan_association(request) + client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -11736,7 +11930,7 @@ def test_update_backup_plan_association_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup_plan_association(request) + client.create_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11744,7 +11938,7 @@ def test_update_backup_plan_association_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_backup_plan_association_async_use_cached_wrapped_rpc( +async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11761,7 +11955,7 @@ async def test_update_backup_plan_association_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_backup_plan_association + client._client._transport.create_backup_plan_association in client._client._transport._wrapped_methods ) @@ -11769,11 +11963,11 @@ async def test_update_backup_plan_association_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_backup_plan_association + client._client._transport.create_backup_plan_association ] = mock_rpc request = {} - await client.update_backup_plan_association(request) + await client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -11783,7 +11977,7 @@ async def test_update_backup_plan_association_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_backup_plan_association(request) + await client.create_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11791,9 +11985,9 @@ async def test_update_backup_plan_association_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_backup_plan_association_async( +async def test_create_backup_plan_association_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.UpdateBackupPlanAssociationRequest, + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -11806,18 +12000,18 @@ async def test_update_backup_plan_association_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_backup_plan_association(request) + response = await client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.UpdateBackupPlanAssociationRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -11825,27 +12019,27 @@ async def test_update_backup_plan_association_async( @pytest.mark.asyncio -async def test_update_backup_plan_association_async_from_dict(): - await test_update_backup_plan_association_async(request_type=dict) +async def test_create_backup_plan_association_async_from_dict(): + await test_create_backup_plan_association_async(request_type=dict) -def test_update_backup_plan_association_field_headers(): +def test_create_backup_plan_association_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.UpdateBackupPlanAssociationRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() - request.backup_plan_association.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_backup_plan_association(request) + client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -11856,30 +12050,30 @@ def test_update_backup_plan_association_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup_plan_association.name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_backup_plan_association_field_headers_async(): +async def test_create_backup_plan_association_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.UpdateBackupPlanAssociationRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() - request.backup_plan_association.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_backup_plan_association(request) + await client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11890,43 +12084,47 @@ async def test_update_backup_plan_association_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "backup_plan_association.name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_update_backup_plan_association_flattened(): +def test_create_backup_plan_association_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_backup_plan_association( + client.create_backup_plan_association( + parent="parent_value", backup_plan_association=backupplanassociation.BackupPlanAssociation( name="name_value" ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_association_id="backup_plan_association_id_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup_plan_association mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" assert arg == mock_val -def test_update_backup_plan_association_flattened_error(): +def test_create_backup_plan_association_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11934,24 +12132,25 @@ def test_update_backup_plan_association_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup_plan_association( - backupplanassociation.UpdateBackupPlanAssociationRequest(), + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", backup_plan_association=backupplanassociation.BackupPlanAssociation( name="name_value" ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_association_id="backup_plan_association_id_value", ) @pytest.mark.asyncio -async def test_update_backup_plan_association_flattened_async(): +async def test_create_backup_plan_association_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -11961,27 +12160,31 @@ async def test_update_backup_plan_association_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_backup_plan_association( + response = await client.create_backup_plan_association( + parent="parent_value", backup_plan_association=backupplanassociation.BackupPlanAssociation( name="name_value" ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_association_id="backup_plan_association_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val arg = args[0].backup_plan_association mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_update_backup_plan_association_flattened_error_async(): +async def test_create_backup_plan_association_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11989,23 +12192,24 @@ async def test_update_backup_plan_association_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_backup_plan_association( - backupplanassociation.UpdateBackupPlanAssociationRequest(), + await client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", backup_plan_association=backupplanassociation.BackupPlanAssociation( name="name_value" ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + backup_plan_association_id="backup_plan_association_id_value", ) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.GetBackupPlanAssociationRequest, + backupplanassociation.UpdateBackupPlanAssociationRequest, dict, ], ) -def test_get_backup_plan_association(request_type, transport: str = "grpc"): +def test_update_backup_plan_association(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12017,40 +12221,23 @@ def test_get_backup_plan_association(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.update_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation( - name="name_value", - resource_type="resource_type_value", - resource="resource_value", - backup_plan="backup_plan_value", - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source="data_source_value", - backup_plan_revision_id="backup_plan_revision_id_value", - backup_plan_revision_name="backup_plan_revision_name_value", - ) - response = client.get_backup_plan_association(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, backupplanassociation.BackupPlanAssociation) - assert response.name == "name_value" - assert response.resource_type == "resource_type_value" - assert response.resource == "resource_value" - assert response.backup_plan == "backup_plan_value" - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == "data_source_value" - assert response.backup_plan_revision_id == "backup_plan_revision_id_value" - assert response.backup_plan_revision_name == "backup_plan_revision_name_value" + assert isinstance(response, future.Future) -def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): +def test_update_backup_plan_association_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -12061,26 +12248,22 @@ def test_get_backup_plan_association_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = backupplanassociation.GetBackupPlanAssociationRequest( - name="name_value", - ) + request = backupplanassociation.UpdateBackupPlanAssociationRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.update_backup_plan_association), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_backup_plan_association(request=request) + client.update_backup_plan_association(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( - name="name_value", - ) + assert args[0] == backupplanassociation.UpdateBackupPlanAssociationRequest() -def test_get_backup_plan_association_use_cached_wrapped_rpc(): +def test_update_backup_plan_association_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12095,7 +12278,7 @@ def test_get_backup_plan_association_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_backup_plan_association + client._transport.update_backup_plan_association in client._transport._wrapped_methods ) @@ -12105,15 +12288,20 @@ def test_get_backup_plan_association_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_backup_plan_association + client._transport.update_backup_plan_association ] = mock_rpc request = {} - client.get_backup_plan_association(request) + client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup_plan_association(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12121,7 +12309,7 @@ def test_get_backup_plan_association_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( +async def test_update_backup_plan_association_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12138,7 +12326,7 @@ async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_backup_plan_association + client._client._transport.update_backup_plan_association in client._client._transport._wrapped_methods ) @@ -12146,16 +12334,21 @@ async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup_plan_association + client._client._transport.update_backup_plan_association ] = mock_rpc request = {} - await client.get_backup_plan_association(request) + await client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_backup_plan_association(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12163,9 +12356,9 @@ async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_backup_plan_association_async( +async def test_update_backup_plan_association_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.GetBackupPlanAssociationRequest, + request_type=backupplanassociation.UpdateBackupPlanAssociationRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -12178,63 +12371,46 @@ async def test_get_backup_plan_association_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.update_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.BackupPlanAssociation( - name="name_value", - resource_type="resource_type_value", - resource="resource_value", - backup_plan="backup_plan_value", - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source="data_source_value", - backup_plan_revision_id="backup_plan_revision_id_value", - backup_plan_revision_name="backup_plan_revision_name_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_backup_plan_association(request) + response = await client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, backupplanassociation.BackupPlanAssociation) - assert response.name == "name_value" - assert response.resource_type == "resource_type_value" - assert response.resource == "resource_value" - assert response.backup_plan == "backup_plan_value" - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == "data_source_value" - assert response.backup_plan_revision_id == "backup_plan_revision_id_value" - assert response.backup_plan_revision_name == "backup_plan_revision_name_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_backup_plan_association_async_from_dict(): - await test_get_backup_plan_association_async(request_type=dict) +async def test_update_backup_plan_association_async_from_dict(): + await test_update_backup_plan_association_async(request_type=dict) -def test_get_backup_plan_association_field_headers(): +def test_update_backup_plan_association_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() - request.name = "name_value" + request.backup_plan_association.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.update_backup_plan_association), "__call__" ) as call: - call.return_value = backupplanassociation.BackupPlanAssociation() - client.get_backup_plan_association(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12245,30 +12421,30 @@ def test_get_backup_plan_association_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan_association.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_backup_plan_association_field_headers_async(): +async def test_update_backup_plan_association_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() - request.name = "name_value" + request.backup_plan_association.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.update_backup_plan_association), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.BackupPlanAssociation() + operations_pb2.Operation(name="operations/op") ) - await client.get_backup_plan_association(request) + await client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12279,37 +12455,43 @@ async def test_get_backup_plan_association_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan_association.name=name_value", ) in kw["metadata"] -def test_get_backup_plan_association_flattened(): +def test_update_backup_plan_association_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.update_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_backup_plan_association( - name="name_value", + client.update_backup_plan_association( + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_backup_plan_association_flattened_error(): +def test_update_backup_plan_association_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12317,45 +12499,54 @@ def test_get_backup_plan_association_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), - name="name_value", + client.update_backup_plan_association( + backupplanassociation.UpdateBackupPlanAssociationRequest(), + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_backup_plan_association_flattened_async(): +async def test_update_backup_plan_association_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.update_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.BackupPlanAssociation() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_backup_plan_association( - name="name_value", + response = await client.update_backup_plan_association( + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_backup_plan_association_flattened_error_async(): +async def test_update_backup_plan_association_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12363,20 +12554,23 @@ async def test_get_backup_plan_association_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), - name="name_value", + await client.update_backup_plan_association( + backupplanassociation.UpdateBackupPlanAssociationRequest(), + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.ListBackupPlanAssociationsRequest, + backupplanassociation.GetBackupPlanAssociationRequest, dict, ], ) -def test_list_backup_plan_associations(request_type, transport: str = "grpc"): +def test_get_backup_plan_association(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12388,28 +12582,964 @@ def test_list_backup_plan_associations(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.get_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + backup_plan_revision_id="backup_plan_revision_id_value", + backup_plan_revision_name="backup_plan_revision_name_value", + ) + response = client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + assert response.backup_plan_revision_id == "backup_plan_revision_id_value" + assert response.backup_plan_revision_name == "backup_plan_revision_name_value" + + +def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_get_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_backup_plan_association + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + backup_plan_revision_id="backup_plan_revision_id_value", + backup_plan_revision_name="backup_plan_revision_name_value", + ) + ) + response = await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.GetBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + assert response.backup_plan_revision_id == "backup_plan_revision_id_value" + assert response.backup_plan_revision_name == "backup_plan_revision_name_value" + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_async_from_dict(): + await test_get_backup_plan_association_async(request_type=dict) + + +def test_get_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = backupplanassociation.BackupPlanAssociation() + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.GetBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + await client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan_association( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.ListBackupPlanAssociationsRequest, + dict, + ], +) +def test_list_backup_plan_associations(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plan_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_backup_plan_associations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_plan_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plan_associations + ] = mock_rpc + request = {} + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_backup_plan_associations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_plan_associations + ] = mock_rpc + + request = {} + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_from_dict(): + await test_list_backup_plan_associations_async(request_type=dict) + + +def test_list_backup_plan_associations_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_backup_plan_associations_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_backup_plan_associations_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_backup_plan_associations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plan_associations( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) + + +def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plan_associations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plan_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plan_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + dict, + ], +) +def test_fetch_backup_plan_associations_for_resource_type( + request_type, transport: str = "grpc" +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + next_page_token="next_page_token_value", + ) ) - response = client.list_backup_plan_associations(request) + response = client.fetch_backup_plan_associations_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert isinstance(response, pagers.FetchBackupPlanAssociationsForResourceTypePager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] -def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): +def test_fetch_backup_plan_associations_for_resource_type_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = BackupDRClient( @@ -12420,30 +13550,37 @@ def test_list_backup_plan_associations_non_empty_request_with_auto_populated_fie # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.ListBackupPlanAssociationsRequest( + request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( parent="parent_value", + resource_type="resource_type_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_backup_plan_associations(request=request) + client.fetch_backup_plan_associations_for_resource_type(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( + assert args[ + 0 + ] == backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( parent="parent_value", + resource_type="resource_type_value", page_token="page_token_value", filter="filter_value", + order_by="order_by_value", ) -def test_list_backup_plan_associations_use_cached_wrapped_rpc(): +def test_fetch_backup_plan_associations_for_resource_type_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12458,7 +13595,7 @@ def test_list_backup_plan_associations_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_backup_plan_associations + client._transport.fetch_backup_plan_associations_for_resource_type in client._transport._wrapped_methods ) @@ -12468,15 +13605,15 @@ def test_list_backup_plan_associations_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_plan_associations + client._transport.fetch_backup_plan_associations_for_resource_type ] = mock_rpc request = {} - client.list_backup_plan_associations(request) + client.fetch_backup_plan_associations_for_resource_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_plan_associations(request) + client.fetch_backup_plan_associations_for_resource_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12484,7 +13621,7 @@ def test_list_backup_plan_associations_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( +async def test_fetch_backup_plan_associations_for_resource_type_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12501,7 +13638,7 @@ async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_backup_plan_associations + client._client._transport.fetch_backup_plan_associations_for_resource_type in client._client._transport._wrapped_methods ) @@ -12509,16 +13646,16 @@ async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_backup_plan_associations + client._client._transport.fetch_backup_plan_associations_for_resource_type ] = mock_rpc request = {} - await client.list_backup_plan_associations(request) + await client.fetch_backup_plan_associations_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_backup_plan_associations(request) + await client.fetch_backup_plan_associations_for_resource_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12526,9 +13663,9 @@ async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_backup_plan_associations_async( +async def test_fetch_backup_plan_associations_for_resource_type_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.ListBackupPlanAssociationsRequest, + request_type=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -12541,51 +13678,59 @@ async def test_list_backup_plan_associations_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], ) ) - response = await client.list_backup_plan_associations(request) + response = await client.fetch_backup_plan_associations_for_resource_type( + request + ) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert isinstance( + response, pagers.FetchBackupPlanAssociationsForResourceTypeAsyncPager + ) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_from_dict(): - await test_list_backup_plan_associations_async(request_type=dict) +async def test_fetch_backup_plan_associations_for_resource_type_async_from_dict(): + await test_fetch_backup_plan_associations_for_resource_type_async(request_type=dict) -def test_list_backup_plan_associations_field_headers(): +def test_fetch_backup_plan_associations_for_resource_type_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.list_backup_plan_associations(request) + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + client.fetch_backup_plan_associations_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12601,25 +13746,26 @@ def test_list_backup_plan_associations_field_headers(): @pytest.mark.asyncio -async def test_list_backup_plan_associations_field_headers_async(): +async def test_fetch_backup_plan_associations_for_resource_type_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.ListBackupPlanAssociationsResponse() + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() ) - await client.list_backup_plan_associations(request) + await client.fetch_backup_plan_associations_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -12634,21 +13780,25 @@ async def test_list_backup_plan_associations_field_headers_async(): ) in kw["metadata"] -def test_list_backup_plan_associations_flattened(): +def test_fetch_backup_plan_associations_for_resource_type_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_backup_plan_associations( + client.fetch_backup_plan_associations_for_resource_type( parent="parent_value", + resource_type="resource_type_value", ) # Establish that the underlying call was made with the expected @@ -12658,9 +13808,12 @@ def test_list_backup_plan_associations_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" + assert arg == mock_val -def test_list_backup_plan_associations_flattened_error(): +def test_fetch_backup_plan_associations_for_resource_type_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -12668,32 +13821,37 @@ def test_list_backup_plan_associations_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_plan_associations( - backupplanassociation.ListBackupPlanAssociationsRequest(), + client.fetch_backup_plan_associations_for_resource_type( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest(), parent="parent_value", + resource_type="resource_type_value", ) @pytest.mark.asyncio -async def test_list_backup_plan_associations_flattened_async(): +async def test_fetch_backup_plan_associations_for_resource_type_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.ListBackupPlanAssociationsResponse() + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_backup_plan_associations( + response = await client.fetch_backup_plan_associations_for_resource_type( parent="parent_value", + resource_type="resource_type_value", ) # Establish that the underlying call was made with the expected @@ -12703,10 +13861,13 @@ async def test_list_backup_plan_associations_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_list_backup_plan_associations_flattened_error_async(): +async def test_fetch_backup_plan_associations_for_resource_type_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -12714,13 +13875,16 @@ async def test_list_backup_plan_associations_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_backup_plan_associations( - backupplanassociation.ListBackupPlanAssociationsRequest(), + await client.fetch_backup_plan_associations_for_resource_type( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest(), parent="parent_value", + resource_type="resource_type_value", ) -def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): +def test_fetch_backup_plan_associations_for_resource_type_pager( + transport_name: str = "grpc", +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -12728,11 +13892,12 @@ def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: # Set the response to a series of pages. 
call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12740,17 +13905,17 @@ def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12765,7 +13930,7 @@ def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_backup_plan_associations( + pager = client.fetch_backup_plan_associations_for_resource_type( request={}, retry=retry, timeout=timeout ) @@ -12780,7 +13945,9 @@ def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): ) -def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): +def test_fetch_backup_plan_associations_for_resource_type_pages( + transport_name: str = "grpc", +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -12788,11 +13955,12 @@ def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", ) as call: # Set the response to a series of pages. 
call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12800,17 +13968,17 @@ def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12818,26 +13986,28 @@ def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): ), RuntimeError, ) - pages = list(client.list_backup_plan_associations(request={}).pages) + pages = list( + client.fetch_backup_plan_associations_for_resource_type(request={}).pages + ) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_pager(): +async def test_fetch_backup_plan_associations_for_resource_type_async_pager(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), + type(client.transport.fetch_backup_plan_associations_for_resource_type), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12845,17 +14015,17 @@ async def test_list_backup_plan_associations_async_pager(): ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12863,7 +14033,7 @@ async def test_list_backup_plan_associations_async_pager(): ), RuntimeError, ) - async_pager = await client.list_backup_plan_associations( + async_pager = await client.fetch_backup_plan_associations_for_resource_type( request={}, ) assert async_pager.next_page_token == "abc" @@ -12879,20 +14049,20 @@ async def test_list_backup_plan_associations_async_pager(): @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_pages(): +async def test_fetch_backup_plan_associations_for_resource_type_async_pages(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), + type(client.transport.fetch_backup_plan_associations_for_resource_type), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12900,17 +14070,17 @@ async def test_list_backup_plan_associations_async_pages(): ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( backup_plan_associations=[ backupplanassociation.BackupPlanAssociation(), backupplanassociation.BackupPlanAssociation(), @@ -12922,7 +14092,7 @@ async def test_list_backup_plan_associations_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_backup_plan_associations(request={}) + await client.fetch_backup_plan_associations_for_resource_type(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -12932,13 +14102,11 @@ async def test_list_backup_plan_associations_async_pages(): @pytest.mark.parametrize( "request_type", [ - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + backupplanassociation.DeleteBackupPlanAssociationRequest, dict, ], ) -def test_fetch_backup_plan_associations_for_resource_type( - request_type, transport: str = "grpc" -): +def test_delete_backup_plan_association(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12950,31 +14118,23 @@ def test_fetch_backup_plan_associations_for_resource_type( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", + type(client.transport.delete_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - next_page_token="next_page_token_value", - ) - ) - response = client.fetch_backup_plan_associations_for_resource_type(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() - ) + request = backupplanassociation.DeleteBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.FetchBackupPlanAssociationsForResourceTypePager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) -def test_fetch_backup_plan_associations_for_resource_type_non_empty_request_with_auto_populated_field(): +def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -12985,37 +14145,26 @@ def test_fetch_backup_plan_associations_for_resource_type_non_empty_request_with # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( - parent="parent_value", - resource_type="resource_type_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", + type(client.transport.delete_backup_plan_association), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.fetch_backup_plan_associations_for_resource_type(request=request) + client.delete_backup_plan_association(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[ - 0 - ] == backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( - parent="parent_value", - resource_type="resource_type_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", ) -def test_fetch_backup_plan_associations_for_resource_type_use_cached_wrapped_rpc(): +def test_delete_backup_plan_association_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13030,7 +14179,7 @@ def test_fetch_backup_plan_associations_for_resource_type_use_cached_wrapped_rpc # Ensure method has been cached assert ( - client._transport.fetch_backup_plan_associations_for_resource_type + client._transport.delete_backup_plan_association in client._transport._wrapped_methods ) @@ -13040,15 +14189,20 @@ def test_fetch_backup_plan_associations_for_resource_type_use_cached_wrapped_rpc "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.fetch_backup_plan_associations_for_resource_type + client._transport.delete_backup_plan_association ] = mock_rpc request = {} - client.fetch_backup_plan_associations_for_resource_type(request) + client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.fetch_backup_plan_associations_for_resource_type(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13056,7 +14210,7 @@ def test_fetch_backup_plan_associations_for_resource_type_use_cached_wrapped_rpc @pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_async_use_cached_wrapped_rpc( +async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13073,7 +14227,7 @@ async def test_fetch_backup_plan_associations_for_resource_type_async_use_cached # Ensure method has been cached assert ( - client._client._transport.fetch_backup_plan_associations_for_resource_type + client._client._transport.delete_backup_plan_association in client._client._transport._wrapped_methods ) @@ -13081,16 +14235,21 @@ async def test_fetch_backup_plan_associations_for_resource_type_async_use_cached mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.fetch_backup_plan_associations_for_resource_type + client._client._transport.delete_backup_plan_association ] = mock_rpc request = {} - await client.fetch_backup_plan_associations_for_resource_type(request) + await client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.fetch_backup_plan_associations_for_resource_type(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13098,9 +14257,9 @@ async def test_fetch_backup_plan_associations_for_resource_type_async_use_cached @pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_async( +async def test_delete_backup_plan_association_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -13113,59 +14272,46 @@ async def test_fetch_backup_plan_associations_for_resource_type_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", + type(client.transport.delete_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.fetch_backup_plan_associations_for_resource_type( - request + operations_pb2.Operation(name="operations/spam") ) + response = await client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() - ) + request = backupplanassociation.DeleteBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance( - response, pagers.FetchBackupPlanAssociationsForResourceTypeAsyncPager - ) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_async_from_dict(): - await test_fetch_backup_plan_associations_for_resource_type_async(request_type=dict) +async def test_delete_backup_plan_association_async_from_dict(): + await test_delete_backup_plan_association_async(request_type=dict) -def test_fetch_backup_plan_associations_for_resource_type_field_headers(): +def test_delete_backup_plan_association_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + request = backupplanassociation.DeleteBackupPlanAssociationRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", + type(client.transport.delete_backup_plan_association), "__call__" ) as call: - call.return_value = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() - ) - client.fetch_backup_plan_associations_for_resource_type(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13176,372 +14322,138 @@ def test_fetch_backup_plan_associations_for_resource_type_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_field_headers_async(): +async def test_delete_backup_plan_association_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() - ) - await client.fetch_backup_plan_associations_for_resource_type(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_fetch_backup_plan_associations_for_resource_type_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.fetch_backup_plan_associations_for_resource_type( - parent="parent_value", - resource_type="resource_type_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].resource_type - mock_val = "resource_type_value" - assert arg == mock_val - - -def test_fetch_backup_plan_associations_for_resource_type_flattened_error(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.fetch_backup_plan_associations_for_resource_type( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest(), - parent="parent_value", - resource_type="resource_type_value", - ) - - -@pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_flattened_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", - ) as call: - # Designate an appropriate return value for the call. - call.return_value = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() - ) - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.fetch_backup_plan_associations_for_resource_type( - parent="parent_value", - resource_type="resource_type_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].resource_type - mock_val = "resource_type_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.fetch_backup_plan_associations_for_resource_type( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest(), - parent="parent_value", - resource_type="resource_type_value", - ) - + request = backupplanassociation.DeleteBackupPlanAssociationRequest() -def test_fetch_backup_plan_associations_for_resource_type_pager( - transport_name: str = "grpc", -): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="abc", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[], - next_page_token="def", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="ghi", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.fetch_backup_plan_associations_for_resource_type( - request={}, retry=retry, timeout=timeout + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.delete_backup_plan_association(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_fetch_backup_plan_associations_for_resource_type_pages( - transport_name: str = "grpc", -): +def test_delete_backup_plan_association_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", + type(client.transport.delete_backup_plan_association), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="abc", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[], - next_page_token="def", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="ghi", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan_association( + name="name_value", ) - pages = list( - client.fetch_backup_plan_associations_for_resource_type(request={}).pages + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_async_pager(): +async def test_delete_backup_plan_association_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.delete_backup_plan_association), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="abc", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[], - next_page_token="def", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="ghi", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - async_pager = await client.fetch_backup_plan_associations_for_resource_type( - request={}, + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_backup_plan_association( + name="name_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all( - isinstance(i, backupplanassociation.BackupPlanAssociation) - for i in responses - ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_fetch_backup_plan_associations_for_resource_type_async_pages(): +async def test_delete_backup_plan_association_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.fetch_backup_plan_associations_for_resource_type), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="abc", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[], - next_page_token="def", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="ghi", - ), - backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - RuntimeError, + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.fetch_backup_plan_associations_for_resource_type(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - backupplanassociation.DeleteBackupPlanAssociationRequest, + backupplanassociation.TriggerBackupRequest, dict, ], ) -def test_delete_backup_plan_association(request_type, transport: str = "grpc"): +def test_trigger_backup(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13552,24 +14464,22 @@ def test_delete_backup_plan_association(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup_plan_association(request) + response = client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.TriggerBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): +def test_trigger_backup_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -13580,26 +14490,26 @@ def test_delete_backup_plan_association_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.DeleteBackupPlanAssociationRequest( + request = backupplanassociation.TriggerBackupRequest( name="name_value", + rule_id="rule_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_backup_plan_association(request=request) + client.trigger_backup(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( + assert args[0] == backupplanassociation.TriggerBackupRequest( name="name_value", + rule_id="rule_id_value", ) -def test_delete_backup_plan_association_use_cached_wrapped_rpc(): +def test_trigger_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13613,21 +14523,16 @@ def test_delete_backup_plan_association_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_backup_plan_association - in client._transport._wrapped_methods - ) + assert client._transport.trigger_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_backup_plan_association - ] = mock_rpc + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc request = {} - client.delete_backup_plan_association(request) + client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -13637,7 +14542,7 @@ def test_delete_backup_plan_association_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup_plan_association(request) + client.trigger_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13645,7 +14550,7 @@ def test_delete_backup_plan_association_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( +async def test_trigger_backup_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13662,7 +14567,7 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_backup_plan_association + client._client._transport.trigger_backup in client._client._transport._wrapped_methods ) @@ -13670,11 +14575,11 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_backup_plan_association + client._client._transport.trigger_backup ] = mock_rpc request = {} - await client.delete_backup_plan_association(request) + await client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -13684,7 +14589,7 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_backup_plan_association(request) + await client.trigger_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13692,9 +14597,9 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_backup_plan_association_async( +async def test_trigger_backup_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, + request_type=backupplanassociation.TriggerBackupRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -13706,19 +14611,17 @@ async def test_delete_backup_plan_association_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_backup_plan_association(request) + response = await client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.TriggerBackupRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -13726,27 +14629,25 @@ async def test_delete_backup_plan_association_async( @pytest.mark.asyncio -async def test_delete_backup_plan_association_async_from_dict(): - await test_delete_backup_plan_association_async(request_type=dict) +async def test_trigger_backup_async_from_dict(): + await test_trigger_backup_async(request_type=dict) -def test_delete_backup_plan_association_field_headers(): +def test_trigger_backup_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.TriggerBackupRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup_plan_association(request) + client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -13762,25 +14663,23 @@ def test_delete_backup_plan_association_field_headers(): @pytest.mark.asyncio -async def test_delete_backup_plan_association_field_headers_async(): +async def test_trigger_backup_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.TriggerBackupRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_backup_plan_association(request) + await client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -13795,21 +14694,20 @@ async def test_delete_backup_plan_association_field_headers_async(): ) in kw["metadata"] -def test_delete_backup_plan_association_flattened(): +def test_trigger_backup_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_backup_plan_association( + client.trigger_backup( name="name_value", + rule_id="rule_id_value", ) # Establish that the underlying call was made with the expected @@ -13819,9 +14717,12 @@ def test_delete_backup_plan_association_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val -def test_delete_backup_plan_association_flattened_error(): +def test_trigger_backup_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -13829,22 +14730,21 @@ def test_delete_backup_plan_association_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), name="name_value", + rule_id="rule_id_value", ) @pytest.mark.asyncio -async def test_delete_backup_plan_association_flattened_async(): +async def test_trigger_backup_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -13853,8 +14753,9 @@ async def test_delete_backup_plan_association_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_backup_plan_association( + response = await client.trigger_backup( name="name_value", + rule_id="rule_id_value", ) # Establish that the underlying call was made with the expected @@ -13864,10 +14765,13 @@ async def test_delete_backup_plan_association_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_delete_backup_plan_association_flattened_error_async(): +async def test_trigger_backup_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -13875,20 +14779,21 @@ async def test_delete_backup_plan_association_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), + await client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), name="name_value", + rule_id="rule_id_value", ) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.TriggerBackupRequest, + datasourcereference.GetDataSourceReferenceRequest, dict, ], ) -def test_trigger_backup(request_type, transport: str = "grpc"): +def test_get_data_source_reference(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13899,22 +14804,37 @@ def test_trigger_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.trigger_backup(request) + call.return_value = datasourcereference.DataSourceReference( + name="name_value", + data_source="data_source_value", + data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, + data_source_backup_count=2535, + total_stored_bytes=1946, + ) + response = client.get_data_source_reference(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.TriggerBackupRequest() + request = datasourcereference.GetDataSourceReferenceRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, datasourcereference.DataSourceReference) + assert response.name == "name_value" + assert response.data_source == "data_source_value" + assert ( + response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE + ) + assert response.data_source_backup_count == 2535 + assert response.total_stored_bytes == 1946 -def test_trigger_backup_non_empty_request_with_auto_populated_field(): +def test_get_data_source_reference_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = BackupDRClient( @@ -13925,26 +14845,26 @@ def test_trigger_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.TriggerBackupRequest( + request = datasourcereference.GetDataSourceReferenceRequest( name="name_value", - rule_id="rule_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.trigger_backup(request=request) + client.get_data_source_reference(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.TriggerBackupRequest( + assert args[0] == datasourcereference.GetDataSourceReferenceRequest( name="name_value", - rule_id="rule_id_value", ) -def test_trigger_backup_use_cached_wrapped_rpc(): +def test_get_data_source_reference_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13958,26 +14878,26 @@ def test_trigger_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.trigger_backup in client._transport._wrapped_methods + assert ( + client._transport.get_data_source_reference + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_data_source_reference + ] = mock_rpc request = {} - client.trigger_backup(request) + client.get_data_source_reference(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.trigger_backup(request) + client.get_data_source_reference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -13985,7 +14905,7 @@ def test_trigger_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_trigger_backup_async_use_cached_wrapped_rpc( +async def test_get_data_source_reference_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14002,7 +14922,7 @@ async def test_trigger_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.trigger_backup + client._client._transport.get_data_source_reference in client._client._transport._wrapped_methods ) @@ -14010,21 +14930,16 @@ async def test_trigger_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.trigger_backup + client._client._transport.get_data_source_reference ] = mock_rpc request = {} - await client.trigger_backup(request) + await client.get_data_source_reference(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. - # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.trigger_backup(request) + await client.get_data_source_reference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14032,9 +14947,9 @@ async def test_trigger_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_trigger_backup_async( +async def test_get_data_source_reference_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.TriggerBackupRequest, + request_type=datasourcereference.GetDataSourceReferenceRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -14046,43 +14961,60 @@ async def test_trigger_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + datasourcereference.DataSourceReference( + name="name_value", + data_source="data_source_value", + data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, + data_source_backup_count=2535, + total_stored_bytes=1946, + ) ) - response = await client.trigger_backup(request) + response = await client.get_data_source_reference(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.TriggerBackupRequest() + request = datasourcereference.GetDataSourceReferenceRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, datasourcereference.DataSourceReference) + assert response.name == "name_value" + assert response.data_source == "data_source_value" + assert ( + response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE + ) + assert response.data_source_backup_count == 2535 + assert response.total_stored_bytes == 1946 @pytest.mark.asyncio -async def test_trigger_backup_async_from_dict(): - await test_trigger_backup_async(request_type=dict) +async def test_get_data_source_reference_async_from_dict(): + await test_get_data_source_reference_async(request_type=dict) -def test_trigger_backup_field_headers(): +def test_get_data_source_reference_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.TriggerBackupRequest() + request = datasourcereference.GetDataSourceReferenceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.trigger_backup(request) + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + call.return_value = datasourcereference.DataSourceReference() + client.get_data_source_reference(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14098,23 +15030,25 @@ def test_trigger_backup_field_headers(): @pytest.mark.asyncio -async def test_trigger_backup_field_headers_async(): +async def test_get_data_source_reference_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.TriggerBackupRequest() + request = datasourcereference.GetDataSourceReferenceRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + datasourcereference.DataSourceReference() ) - await client.trigger_backup(request) + await client.get_data_source_reference(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14129,20 +15063,21 @@ async def test_trigger_backup_field_headers_async(): ) in kw["metadata"] -def test_trigger_backup_flattened(): +def test_get_data_source_reference_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = datasourcereference.DataSourceReference() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.trigger_backup( + client.get_data_source_reference( name="name_value", - rule_id="rule_id_value", ) # Establish that the underlying call was made with the expected @@ -14152,12 +15087,9 @@ def test_trigger_backup_flattened(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].rule_id - mock_val = "rule_id_value" - assert arg == mock_val -def test_trigger_backup_flattened_error(): +def test_get_data_source_reference_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14165,32 +15097,32 @@ def test_trigger_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.trigger_backup( - backupplanassociation.TriggerBackupRequest(), + client.get_data_source_reference( + datasourcereference.GetDataSourceReferenceRequest(), name="name_value", - rule_id="rule_id_value", ) @pytest.mark.asyncio -async def test_trigger_backup_flattened_async(): +async def test_get_data_source_reference_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = datasourcereference.DataSourceReference() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + datasourcereference.DataSourceReference() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.trigger_backup( + response = await client.get_data_source_reference( name="name_value", - rule_id="rule_id_value", ) # Establish that the underlying call was made with the expected @@ -14200,13 +15132,10 @@ async def test_trigger_backup_flattened_async(): arg = args[0].name mock_val = "name_value" assert arg == mock_val - arg = args[0].rule_id - mock_val = "rule_id_value" - assert arg == mock_val @pytest.mark.asyncio -async def test_trigger_backup_flattened_error_async(): +async def test_get_data_source_reference_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -14214,21 +15143,20 @@ async def test_trigger_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.trigger_backup( - backupplanassociation.TriggerBackupRequest(), + await client.get_data_source_reference( + datasourcereference.GetDataSourceReferenceRequest(), name="name_value", - rule_id="rule_id_value", ) @pytest.mark.parametrize( "request_type", [ - datasourcereference.GetDataSourceReferenceRequest, + datasourcereference.ListDataSourceReferencesRequest, dict, ], ) -def test_get_data_source_reference(request_type, transport: str = "grpc"): +def test_list_data_source_references(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14240,34 +15168,26 @@ def test_get_data_source_reference(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_source_reference), "__call__" + type(client.transport.list_data_source_references), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = datasourcereference.DataSourceReference( - name="name_value", - data_source="data_source_value", - data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, - data_source_backup_count=2535, + call.return_value = datasourcereference.ListDataSourceReferencesResponse( + next_page_token="next_page_token_value", ) - response = client.get_data_source_reference(request) + response = client.list_data_source_references(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = datasourcereference.GetDataSourceReferenceRequest() + request = datasourcereference.ListDataSourceReferencesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, datasourcereference.DataSourceReference) - assert response.name == "name_value" - assert response.data_source == "data_source_value" - assert ( - response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE - ) - assert response.data_source_backup_count == 2535 + assert isinstance(response, pagers.ListDataSourceReferencesPager) + assert response.next_page_token == "next_page_token_value" -def test_get_data_source_reference_non_empty_request_with_auto_populated_field(): +def test_list_data_source_references_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -14278,26 +15198,32 @@ def test_get_data_source_reference_non_empty_request_with_auto_populated_field() # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = datasourcereference.GetDataSourceReferenceRequest( - name="name_value", + request = datasourcereference.ListDataSourceReferencesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_source_reference), "__call__" + type(client.transport.list_data_source_references), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_data_source_reference(request=request) + client.list_data_source_references(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == datasourcereference.GetDataSourceReferenceRequest( - name="name_value", + assert args[0] == datasourcereference.ListDataSourceReferencesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_get_data_source_reference_use_cached_wrapped_rpc(): +def test_list_data_source_references_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14312,7 +15238,7 @@ def test_get_data_source_reference_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_data_source_reference + client._transport.list_data_source_references in client._transport._wrapped_methods ) @@ -14322,15 +15248,15 @@ def test_get_data_source_reference_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_data_source_reference + client._transport.list_data_source_references ] = mock_rpc request = {} - client.get_data_source_reference(request) + client.list_data_source_references(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_source_reference(request) + client.list_data_source_references(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14338,7 +15264,7 @@ def test_get_data_source_reference_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_data_source_reference_async_use_cached_wrapped_rpc( +async def test_list_data_source_references_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -14355,7 +15281,7 @@ async def test_get_data_source_reference_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_data_source_reference + client._client._transport.list_data_source_references in client._client._transport._wrapped_methods ) @@ -14363,16 +15289,16 @@ async def test_get_data_source_reference_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_data_source_reference + client._client._transport.list_data_source_references ] = mock_rpc request = {} - await client.get_data_source_reference(request) + await client.list_data_source_references(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_data_source_reference(request) + await client.list_data_source_references(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -14380,9 +15306,9 @@ async def test_get_data_source_reference_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_data_source_reference_async( +async def test_list_data_source_references_async( transport: str = "grpc_asyncio", - request_type=datasourcereference.GetDataSourceReferenceRequest, + request_type=datasourcereference.ListDataSourceReferencesRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -14395,57 +15321,49 @@ async def test_get_data_source_reference_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_source_reference), "__call__" + type(client.transport.list_data_source_references), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datasourcereference.DataSourceReference( - name="name_value", - data_source="data_source_value", - data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, - data_source_backup_count=2535, + datasourcereference.ListDataSourceReferencesResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_data_source_reference(request) + response = await client.list_data_source_references(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = datasourcereference.GetDataSourceReferenceRequest() + request = datasourcereference.ListDataSourceReferencesRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, datasourcereference.DataSourceReference) - assert response.name == "name_value" - assert response.data_source == "data_source_value" - assert ( - response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE - ) - assert response.data_source_backup_count == 2535 + assert isinstance(response, pagers.ListDataSourceReferencesAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_data_source_reference_async_from_dict(): - await test_get_data_source_reference_async(request_type=dict) +async def test_list_data_source_references_async_from_dict(): + await test_list_data_source_references_async(request_type=dict) -def test_get_data_source_reference_field_headers(): +def test_list_data_source_references_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datasourcereference.GetDataSourceReferenceRequest() + request = datasourcereference.ListDataSourceReferencesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_data_source_reference), "__call__" + type(client.transport.list_data_source_references), "__call__" ) as call: - call.return_value = datasourcereference.DataSourceReference() - client.get_data_source_reference(request) + call.return_value = datasourcereference.ListDataSourceReferencesResponse() + client.list_data_source_references(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -14456,30 +15374,30 @@ def test_get_data_source_reference_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_data_source_reference_field_headers_async(): +async def test_list_data_source_references_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = datasourcereference.GetDataSourceReferenceRequest() + request = datasourcereference.ListDataSourceReferencesRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_source_reference), "__call__" + type(client.transport.list_data_source_references), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datasourcereference.DataSourceReference() + datasourcereference.ListDataSourceReferencesResponse() ) - await client.get_data_source_reference(request) + await client.list_data_source_references(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -14490,37 +15408,37 @@ async def test_get_data_source_reference_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_data_source_reference_flattened(): +def test_list_data_source_references_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_source_reference), "__call__" + type(client.transport.list_data_source_references), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = datasourcereference.DataSourceReference() + call.return_value = datasourcereference.ListDataSourceReferencesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_data_source_reference( - name="name_value", + client.list_data_source_references( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_data_source_reference_flattened_error(): +def test_list_data_source_references_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -14528,56 +15446,264 @@ def test_get_data_source_reference_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_data_source_reference( - datasourcereference.GetDataSourceReferenceRequest(), - name="name_value", + client.list_data_source_references( + datasourcereference.ListDataSourceReferencesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_source_references_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_source_references), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasourcereference.ListDataSourceReferencesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.ListDataSourceReferencesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_source_references( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_source_references_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_data_source_references( + datasourcereference.ListDataSourceReferencesRequest(), + parent="parent_value", + ) + + +def test_list_data_source_references_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_source_references), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_source_references( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, datasourcereference.DataSourceReference) for i in results + ) + + +def test_list_data_source_references_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_source_references), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_source_references(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_source_references_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_source_references), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_source_references( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, datasourcereference.DataSourceReference) for i in responses ) @pytest.mark.asyncio -async def test_get_data_source_reference_flattened_async(): +async def test_list_data_source_references_async_pages(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_data_source_reference), "__call__" + type(client.transport.list_data_source_references), + "__call__", + new_callable=mock.AsyncMock, ) as call: - # Designate an appropriate return value for the call. - call.return_value = datasourcereference.DataSourceReference() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - datasourcereference.DataSourceReference() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_data_source_reference( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_data_source_reference_flattened_error_async(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_data_source_reference( - datasourcereference.GetDataSourceReferenceRequest(), - name="name_value", + # Set the response to a series of pages. 
+ call.side_effect = ( + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_source_references(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( @@ -17940,25 +19066,212 @@ def test_get_data_source_rest_use_cached_wrapped_rpc(): client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc request = {} - client.get_data_source(request) + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_source(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_source_rest_required_fields( + request_type=backupvault.GetDataSourceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_data_source_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_source( + backupvault.GetDataSourceRequest(), + name="name_value", + ) + + +def test_update_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_source in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_data_source + ] = mock_rpc + + request = {} + client.update_data_source(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_source(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_data_source_rest_required_fields( - request_type=backupvault.GetDataSourceRequest, +def test_update_data_source_rest_required_fields( + request_type=backupvault.UpdateDataSourceRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17969,21 +19282,25 @@ def test_get_data_source_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_source._get_unset_required_fields(jsonified_request) + ).update_data_source._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_data_source._get_unset_required_fields(jsonified_request) + ).update_data_source._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17992,7 +19309,7 @@ def test_get_data_source_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupvault.DataSource() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18004,39 +19321,51 @@ def test_get_data_source_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_source(request) + response = client.update_data_source(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_data_source_rest_unset_required_fields(): +def test_update_data_source_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_data_source._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_data_source._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "dataSource", + ) + ) + ) -def test_get_data_source_rest_flattened(): +def test_update_data_source_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18045,43 +19374,44 @@ def test_get_data_source_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.DataSource() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_source(**mock_args) + client.update_data_source(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" + "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" % client.transport._host, args[1], ) -def test_get_data_source_rest_flattened_error(transport: str = "rest"): +def test_update_data_source_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18090,13 +19420,14 @@ def test_get_data_source_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_data_source( - backupvault.GetDataSourceRequest(), - name="name_value", + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_data_source_rest_use_cached_wrapped_rpc(): +def test_list_backups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18110,42 +19441,33 @@ def test_update_data_source_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_data_source in client._transport._wrapped_methods - ) + assert client._transport.list_backups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_data_source - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc request = {} - client.update_data_source(request) + client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_source(request) + client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_data_source_rest_required_fields( - request_type=backupvault.UpdateDataSourceRequest, -): +def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): transport_class = transports.BackupDRRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18156,25 +19478,31 @@ def test_update_data_source_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_source._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_source._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "allow_missing", - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", + "view", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18183,7 +19511,7 @@ def test_update_data_source_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListBackupsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18195,51 +19523,50 @@ def test_update_data_source_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_source(request) + response = client.list_backups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_data_source_rest_unset_required_fields(): +def test_list_backups_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_data_source._get_unset_required_fields({}) + unset_fields = transport.list_backups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "dataSource", + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", ) ) + & set(("parent",)) ) -def test_update_data_source_rest_flattened(): +def test_list_backups_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18248,44 +19575,43 @@ def test_update_data_source_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListBackupsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "data_source": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" - } + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" } # get truthy value for each flattened field mock_args = dict( - data_source=backupvault.DataSource(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_source(**mock_args) + client.list_backups(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" + "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" % client.transport._host, args[1], ) -def test_update_data_source_rest_flattened_error(transport: str = "rest"): +def test_list_backups_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18294,14 +19620,76 @@ def test_update_data_source_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_source( - backupvault.UpdateDataSourceRequest(), - data_source=backupvault.DataSource(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", ) -def test_list_backups_rest_use_cached_wrapped_rpc(): +def test_list_backups_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + pages = list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_fetch_backups_for_resource_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18315,33 +19703,41 @@ def test_list_backups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backups in client._transport._wrapped_methods + assert ( + client._transport.fetch_backups_for_resource_type + in client._transport._wrapped_methods + ) # Replace cached wrapped 
function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + client._transport._wrapped_methods[ + client._transport.fetch_backups_for_resource_type + ] = mock_rpc request = {} - client.list_backups(request) + client.fetch_backups_for_resource_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_backups(request) + client.fetch_backups_for_resource_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): +def test_fetch_backups_for_resource_type_rest_required_fields( + request_type=backupvault.FetchBackupsForResourceTypeRequest, +): transport_class = transports.BackupDRRestTransport request_init = {} request_init["parent"] = "" + request_init["resource_type"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18349,19 +19745,23 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR ) # verify fields with default values are dropped + assert "resourceType" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).fetch_backups_for_resource_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == request_init["resource_type"] jsonified_request["parent"] = "parent_value" + jsonified_request["resourceType"] = "resource_type_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).fetch_backups_for_resource_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -18369,6 +19769,7 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR "order_by", "page_size", "page_token", + "resource_type", "view", ) ) @@ -18377,6 +19778,8 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == "resource_type_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18385,7 +19788,7 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse() + return_value = backupvault.FetchBackupsForResourceTypeResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -18406,26 +19809,36 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) + return_value = backupvault.FetchBackupsForResourceTypeResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) + response = client.fetch_backups_for_resource_type(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "resourceType", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backups_rest_unset_required_fields(): +def test_fetch_backups_for_resource_type_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backups._get_unset_required_fields({}) + unset_fields = transport.fetch_backups_for_resource_type._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( set( ( @@ -18433,14 +19846,20 @@ def test_list_backups_rest_unset_required_fields(): "orderBy", "pageSize", "pageToken", + "resourceType", "view", ) ) - & set(("parent",)) + & set( + ( + "parent", + "resourceType", + ) + ) ) -def test_list_backups_rest_flattened(): +def test_fetch_backups_for_resource_type_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18449,7 +19868,7 @@ def test_list_backups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse() + return_value = backupvault.FetchBackupsForResourceTypeResponse() # get arguments that satisfy an http rule for this method sample_request = { @@ -18459,6 +19878,7 @@ def test_list_backups_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + resource_type="resource_type_value", ) mock_args.update(sample_request) @@ -18466,26 +19886,26 @@ def test_list_backups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) + return_value = backupvault.FetchBackupsForResourceTypeResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(**mock_args) + client.fetch_backups_for_resource_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" + "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups:fetchForResourceType" % client.transport._host, args[1], ) -def test_list_backups_rest_flattened_error(transport: str = "rest"): +def test_fetch_backups_for_resource_type_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18494,13 +19914,14 @@ def test_list_backups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backups( - backupvault.ListBackupsRequest(), + client.fetch_backups_for_resource_type( + backupvault.FetchBackupsForResourceTypeRequest(), parent="parent_value", + resource_type="resource_type_value", ) -def test_list_backups_rest_pager(transport: str = "rest"): +def test_fetch_backups_for_resource_type_rest_pager(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18512,7 +19933,7 @@ def test_list_backups_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - backupvault.ListBackupsResponse( + backupvault.FetchBackupsForResourceTypeResponse( backups=[ backupvault.Backup(), backupvault.Backup(), @@ -18520,17 +19941,17 @@ def test_list_backups_rest_pager(transport: str = "rest"): ], next_page_token="abc", ), - backupvault.ListBackupsResponse( + backupvault.FetchBackupsForResourceTypeResponse( backups=[], next_page_token="def", ), - backupvault.ListBackupsResponse( + backupvault.FetchBackupsForResourceTypeResponse( backups=[ backupvault.Backup(), ], next_page_token="ghi", ), - backupvault.ListBackupsResponse( + backupvault.FetchBackupsForResourceTypeResponse( backups=[ backupvault.Backup(), backupvault.Backup(), @@ -18541,7 +19962,9 @@ def test_list_backups_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) + response = tuple( + backupvault.FetchBackupsForResourceTypeResponse.to_json(x) for x in response + ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -18552,13 +19975,15 @@ def test_list_backups_rest_pager(transport: str = "rest"): "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" } - pager = client.list_backups(request=sample_request) + pager = client.fetch_backups_for_resource_type(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, backupvault.Backup) for i in results) - pages = list(client.list_backups(request=sample_request).pages) + pages = list( + client.fetch_backups_for_resource_type(request=sample_request).pages + ) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -22573,20 +23998,292 @@ def test_get_data_source_reference_rest_flattened(): ) -def test_get_data_source_reference_rest_flattened_error(transport: str = "rest"): +def test_get_data_source_reference_rest_flattened_error(transport: str = "rest"): + client = 
BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source_reference( + datasourcereference.GetDataSourceReferenceRequest(), + name="name_value", + ) + + +def test_list_data_source_references_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_data_source_references + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_data_source_references + ] = mock_rpc + + request = {} + client.list_data_source_references(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_data_source_references(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_source_references_rest_required_fields( + request_type=datasourcereference.ListDataSourceReferencesRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_source_references._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_source_references._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datasourcereference.ListDataSourceReferencesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasourcereference.ListDataSourceReferencesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_data_source_references(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_source_references_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_source_references._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_data_source_references_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasourcereference.ListDataSourceReferencesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasourcereference.ListDataSourceReferencesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_data_source_references(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dataSourceReferences" + % client.transport._host, + args[1], + ) + + +def test_list_data_source_references_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_data_source_references( + datasourcereference.ListDataSourceReferencesRequest(), + parent="parent_value", + ) + + +def test_list_data_source_references_rest_pager(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_source_reference( - datasourcereference.GetDataSourceReferenceRequest(), - name="name_value", + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.ListDataSourceReferencesResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datasourcereference.ListDataSourceReferencesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_data_source_references(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, datasourcereference.DataSourceReference) for i in results ) + pages = list(client.list_data_source_references(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + def test_fetch_data_source_references_for_resource_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -23468,6 +25165,29 @@ def test_list_backups_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_backups_for_resource_type_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: + call.return_value = backupvault.FetchBackupsForResourceTypeResponse() + client.fetch_backups_for_resource_type(request=None) + + # Establish that the underlying stub method was called. 
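The new ListDataSourceReferences tests above cover required fields, flattened calls, and paging. A minimal sketch of the corresponding caller, assuming the google.cloud.backupdr_v1 import path and placeholder project and location values:

    from google.cloud import backupdr_v1


    def print_data_source_references(project: str, location: str) -> None:
        client = backupdr_v1.BackupDRClient()
        pager = client.list_data_source_references(
            parent=f"projects/{project}/locations/{location}",
        )
        # Additional pages are fetched transparently while iterating; a fresh
        # call's .pages attribute exposes per-page responses, as the pager test does.
        for ref in pager:
            print(ref.name)
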
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.FetchBackupsForResourceTypeRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_backup_empty_call_grpc(): @@ -23898,6 +25618,29 @@ def test_get_data_source_reference_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_source_references_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_source_references), "__call__" + ) as call: + call.return_value = datasourcereference.ListDataSourceReferencesResponse() + client.list_data_source_references(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datasourcereference.ListDataSourceReferencesRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_fetch_data_source_references_for_resource_type_empty_call_grpc(): @@ -24378,6 +26121,35 @@ async def test_list_backups_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_fetch_backups_for_resource_type_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchBackupsForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) + await client.fetch_backups_for_resource_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.FetchBackupsForResourceTypeRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -24920,6 +26692,7 @@ async def test_get_data_source_reference_empty_call_grpc_asyncio(): data_source="data_source_value", data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, data_source_backup_count=2535, + total_stored_bytes=1946, ) ) await client.get_data_source_reference(request=None) @@ -24932,6 +26705,35 @@ async def test_get_data_source_reference_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_source_references_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
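The grpc_asyncio failsafe tests above exercise the same new methods on BackupDRAsyncClient. On the async surface the call is awaited and returns an async pager, following the same pattern as the generated async samples; the import path and placeholder values in this sketch are assumptions.

    from google.cloud import backupdr_v1


    async def fetch_backups_async(parent: str, resource_type: str) -> None:
        client = backupdr_v1.BackupDRAsyncClient()
        # Awaiting the call yields an async pager over Backup messages.
        pager = await client.fetch_backups_for_resource_type(
            parent=parent,
            resource_type=resource_type,
        )
        async for backup in pager:
            print(backup.name)


    # Example invocation (placeholder resource names):
    # asyncio.run(fetch_backups_async(
    #     "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4",
    #     "resource_type_value",
    # ))
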
+ with mock.patch.object( + type(client.transport.list_data_source_references), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.ListDataSourceReferencesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_data_source_references(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datasourcereference.ListDataSourceReferencesRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -27018,40 +28820,171 @@ def get_message_fields(field): } ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["data_source"][field])): - del request_init["data_source"][field][i][subfield] - else: - del request_init["data_source"][field][subfield] + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] + else: + del request_init["data_source"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_data_source(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_source_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_data_source" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_data_source_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_update_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupvault.UpdateDataSourceRequest.pb( + backupvault.UpdateDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = backupvault.UpdateDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsRequest): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backups(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupsRequest, + dict, + ], +) +def test_list_backups_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_source(request) + response = client.list_backups(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListBackupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_source_rest_interceptors(null_interceptor): +def test_list_backups_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -27063,20 +28996,16 @@ def test_update_data_source_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_data_source" + transports.BackupDRRestInterceptor, "post_list_backups" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_data_source_with_metadata" + transports.BackupDRRestInterceptor, "post_list_backups_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_update_data_source" + transports.BackupDRRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.UpdateDataSourceRequest.pb( - backupvault.UpdateDataSourceRequest() - ) + pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -27087,19 +29016,21 @@ def test_update_data_source_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backupvault.ListBackupsResponse.to_json( + backupvault.ListBackupsResponse() + ) req.return_value.content = return_value - request = backupvault.UpdateDataSourceRequest() + request = backupvault.ListBackupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backupvault.ListBackupsResponse() + post_with_metadata.return_value = backupvault.ListBackupsResponse(), metadata - client.update_data_source( + client.list_backups( request, metadata=[ ("key", 
"val"), @@ -27112,7 +29043,9 @@ def test_update_data_source_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsRequest): +def test_fetch_backups_for_resource_type_rest_bad_request( + request_type=backupvault.FetchBackupsForResourceTypeRequest, +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -27134,17 +29067,17 @@ def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(request) + client.fetch_backups_for_resource_type(request) @pytest.mark.parametrize( "request_type", [ - backupvault.ListBackupsRequest, + backupvault.FetchBackupsForResourceTypeRequest, dict, ], ) -def test_list_backups_rest_call_success(request_type): +def test_fetch_backups_for_resource_type_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -27158,9 +29091,8 @@ def test_list_backups_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse( + return_value = backupvault.FetchBackupsForResourceTypeResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -27168,21 +29100,20 @@ def test_list_backups_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) + return_value = backupvault.FetchBackupsForResourceTypeResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) + response = client.fetch_backups_for_resource_type(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupsPager) + assert isinstance(response, pagers.FetchBackupsForResourceTypePager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): +def test_fetch_backups_for_resource_type_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -27194,16 +29125,19 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backups" + transports.BackupDRRestInterceptor, "post_fetch_backups_for_resource_type" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backups_with_metadata" + transports.BackupDRRestInterceptor, + "post_fetch_backups_for_resource_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_backups" + transports.BackupDRRestInterceptor, "pre_fetch_backups_for_resource_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) + pb_message = backupvault.FetchBackupsForResourceTypeRequest.pb( + backupvault.FetchBackupsForResourceTypeRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -27214,21 +29148,24 @@ def test_list_backups_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.ListBackupsResponse.to_json( - backupvault.ListBackupsResponse() + return_value = backupvault.FetchBackupsForResourceTypeResponse.to_json( + backupvault.FetchBackupsForResourceTypeResponse() ) req.return_value.content = return_value - request = backupvault.ListBackupsRequest() + request = backupvault.FetchBackupsForResourceTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupvault.ListBackupsResponse() - post_with_metadata.return_value = backupvault.ListBackupsResponse(), metadata + post.return_value = backupvault.FetchBackupsForResourceTypeResponse() + post_with_metadata.return_value = ( + backupvault.FetchBackupsForResourceTypeResponse(), + metadata, + ) - client.list_backups( + client.fetch_backups_for_resource_type( request, metadata=[ ("key", "val"), @@ -27557,7 +29494,9 @@ def test_update_backup_rest_call_success(request_type): "database_installed_version": "database_installed_version_value", "final_backup": True, "source_instance": "source_instance_value", + "instance_create_time": {}, "instance_tier": "instance_tier_value", + "instance_delete_time": {}, }, "backup_appliance_backup_properties": { "generation_id": 1368, @@ -27587,6 +29526,11 @@ def test_update_backup_rest_call_success(request_type): "resource_size_bytes": 2056, "satisfies_pzs": True, "satisfies_pzi": True, + "gcp_resource": { + "gcp_resourcename": "gcp_resourcename_value", + "location": "location_value", + "type_": "type__value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
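The interceptor tests above patch the new pre/post hooks on BackupDRRestInterceptor. Applications can install such hooks by subclassing the interceptor and handing it to the REST transport, which is the same wiring these tests construct; the services module path below is assumed from the usual generated layout, and AnonymousCredentials is a placeholder.

    from google.auth import credentials as ga_credentials
    from google.cloud import backupdr_v1
    from google.cloud.backupdr_v1.services.backup_dr import transports


    class LoggingInterceptor(transports.BackupDRRestInterceptor):
        def pre_fetch_backups_for_resource_type(self, request, metadata):
            # Runs before the request is sent; must return (request, metadata).
            print("fetching backups for resource type:", request.resource_type)
            return request, metadata

        def post_fetch_backups_for_resource_type(self, response):
            # Runs on the deserialized response; must return it (possibly modified).
            return response


    transport = transports.BackupDRRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=LoggingInterceptor(),
    )
    client = backupdr_v1.BackupDRClient(transport=transport)
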
# Delete any fields which are not present in the current runtime dependency @@ -30298,6 +32242,7 @@ def test_get_data_source_reference_rest_call_success(request_type): data_source="data_source_value", data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, data_source_backup_count=2535, + total_stored_bytes=1946, ) # Wrap the value into a proper Response obj @@ -30320,6 +32265,7 @@ def test_get_data_source_reference_rest_call_success(request_type): response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE ) assert response.data_source_backup_count == 2535 + assert response.total_stored_bytes == 1946 @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -30388,6 +32334,139 @@ def test_get_data_source_reference_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_list_data_source_references_rest_bad_request( + request_type=datasourcereference.ListDataSourceReferencesRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_source_references(request) + + +@pytest.mark.parametrize( + "request_type", + [ + datasourcereference.ListDataSourceReferencesRequest, + dict, + ], +) +def test_list_data_source_references_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datasourcereference.ListDataSourceReferencesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasourcereference.ListDataSourceReferencesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_data_source_references(request) + + # Establish that the response is the type that we expect. 
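The call-success test above asserts that the new total_stored_bytes field on DataSourceReference round-trips alongside the existing backup count. Reading it from a live client would look roughly like this; the import path and resource name are placeholders.

    from google.cloud import backupdr_v1


    def show_reference_usage(name: str) -> None:
        client = backupdr_v1.BackupDRClient()
        ref = client.get_data_source_reference(name=name)
        # total_stored_bytes is the field added in this change.
        print(ref.data_source_backup_count, ref.total_stored_bytes)
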
+ assert isinstance(response, pagers.ListDataSourceReferencesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_source_references_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_data_source_references" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_list_data_source_references_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_data_source_references" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = datasourcereference.ListDataSourceReferencesRequest.pb( + datasourcereference.ListDataSourceReferencesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = datasourcereference.ListDataSourceReferencesResponse.to_json( + datasourcereference.ListDataSourceReferencesResponse() + ) + req.return_value.content = return_value + + request = datasourcereference.ListDataSourceReferencesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datasourcereference.ListDataSourceReferencesResponse() + post_with_metadata.return_value = ( + datasourcereference.ListDataSourceReferencesResponse(), + metadata, + ) + + client.list_data_source_references( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_fetch_data_source_references_for_resource_type_rest_bad_request( request_type=datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, ): @@ -31526,6 +33605,28 @@ def test_list_backups_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_backups_for_resource_type_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backups_for_resource_type), "__call__" + ) as call: + client.fetch_backups_for_resource_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.FetchBackupsForResourceTypeRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_get_backup_empty_call_rest(): @@ -31935,6 +34036,28 @@ def test_get_data_source_reference_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_source_references_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_source_references), "__call__" + ) as call: + client.list_data_source_references(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datasourcereference.ListDataSourceReferencesRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_fetch_data_source_references_for_resource_type_empty_call_rest(): @@ -32046,6 +34169,7 @@ def test_backup_dr_base_transport(): "get_data_source", "update_data_source", "list_backups", + "fetch_backups_for_resource_type", "get_backup", "update_backup", "delete_backup", @@ -32065,6 +34189,7 @@ def test_backup_dr_base_transport(): "delete_backup_plan_association", "trigger_backup", "get_data_source_reference", + "list_data_source_references", "fetch_data_source_references_for_resource_type", "initialize_service", "set_iam_policy", @@ -32378,6 +34503,9 @@ def test_backup_dr_client_transport_session_collision(transport_name): session1 = client1.transport.list_backups._session session2 = client2.transport.list_backups._session assert session1 != session2 + session1 = client1.transport.fetch_backups_for_resource_type._session + session2 = client2.transport.fetch_backups_for_resource_type._session + assert session1 != session2 session1 = client1.transport.get_backup._session session2 = client2.transport.get_backup._session assert session1 != session2 @@ -32439,6 +34567,9 @@ def test_backup_dr_client_transport_session_collision(transport_name): session1 = client1.transport.get_data_source_reference._session session2 = client2.transport.get_data_source_reference._session assert session1 != session2 + session1 = client1.transport.list_data_source_references._session + session2 = client2.transport.list_data_source_references._session + assert session1 != session2 session1 = client1.transport.fetch_data_source_references_for_resource_type._session session2 = client2.transport.fetch_data_source_references_for_resource_type._session assert session1 != session2 diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py index 7b5f2a9bd950..8000f7f9d3a6 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub/__init__.py @@ -66,6 +66,7 @@ Routine, SharedResourceType, SharingEnvironmentConfig, + StoredProcedureConfig, SubmitQueryTemplateRequest, SubscribeDataExchangeRequest, SubscribeDataExchangeResponse, @@ -130,6 +131,7 @@ "RevokeSubscriptionResponse", "Routine", "SharingEnvironmentConfig", + "StoredProcedureConfig", "SubmitQueryTemplateRequest", "SubscribeDataExchangeRequest", "SubscribeDataExchangeResponse", diff --git 
a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py index 76b7dd757a91..771a199995ef 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/__init__.py @@ -64,6 +64,7 @@ Routine, SharedResourceType, SharingEnvironmentConfig, + StoredProcedureConfig, SubmitQueryTemplateRequest, SubscribeDataExchangeRequest, SubscribeDataExchangeResponse, @@ -139,6 +140,7 @@ "Routine", "SharedResourceType", "SharingEnvironmentConfig", + "StoredProcedureConfig", "SubmitQueryTemplateRequest", "SubscribeDataExchangeRequest", "SubscribeDataExchangeResponse", diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py index bc9a2baa326d..9a82ae786c7f 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/__init__.py @@ -55,6 +55,7 @@ Routine, SharedResourceType, SharingEnvironmentConfig, + StoredProcedureConfig, SubmitQueryTemplateRequest, SubscribeDataExchangeRequest, SubscribeDataExchangeResponse, @@ -117,6 +118,7 @@ "RevokeSubscriptionResponse", "Routine", "SharingEnvironmentConfig", + "StoredProcedureConfig", "SubmitQueryTemplateRequest", "SubscribeDataExchangeRequest", "SubscribeDataExchangeResponse", diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py index 53fcaa10a38d..6cd91f3f7806 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/analyticshub.py @@ -47,6 +47,7 @@ "DestinationDataset", "DestinationPubSubSubscription", "Listing", + "StoredProcedureConfig", "Subscription", "ListDataExchangesRequest", "ListDataExchangesResponse", @@ -855,6 +856,10 @@ class Listing(proto.Message): Optional. If set, restricted export configuration will be propagated and enforced on the linked dataset. + stored_procedure_config (google.cloud.bigquery_analyticshub_v1.types.StoredProcedureConfig): + Optional. If set, stored procedure + configuration will be propagated and enforced on + the linked dataset. discovery_type (google.cloud.bigquery_analyticshub_v1.types.DiscoveryType): Optional. Type of discovery of the listing on the discovery page. @@ -1360,6 +1365,11 @@ class CommercialState(proto.Enum): number=13, message=RestrictedExportConfig, ) + stored_procedure_config: "StoredProcedureConfig" = proto.Field( + proto.MESSAGE, + number=20, + message="StoredProcedureConfig", + ) discovery_type: "DiscoveryType" = proto.Field( proto.ENUM, number=14, @@ -1389,6 +1399,44 @@ class CommercialState(proto.Enum): ) +class StoredProcedureConfig(proto.Message): + r"""Stored procedure configuration, used to configure stored + procedure sharing on linked dataset. + + Attributes: + enabled (bool): + Optional. If true, enable sharing of stored + procedure. 
+ allowed_stored_procedure_types (MutableSequence[google.cloud.bigquery_analyticshub_v1.types.StoredProcedureConfig.StoredProcedureType]): + Output only. Types of stored procedure + supported to share. + """ + + class StoredProcedureType(proto.Enum): + r"""Enum to specify the type of stored procedure to share. + + Values: + STORED_PROCEDURE_TYPE_UNSPECIFIED (0): + Default value. This value is unused. + SQL_PROCEDURE (1): + SQL stored procedure. + """ + STORED_PROCEDURE_TYPE_UNSPECIFIED = 0 + SQL_PROCEDURE = 1 + + enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + allowed_stored_procedure_types: MutableSequence[ + StoredProcedureType + ] = proto.RepeatedField( + proto.ENUM, + number=2, + enum=StoredProcedureType, + ) + + class Subscription(proto.Message): r"""A subscription represents a subscribers' access to a particular set of published data. It contains references to diff --git a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/pubsub.py b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/pubsub.py index 7c25a052b7e9..5b46a55e5913 100644 --- a/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/pubsub.py +++ b/packages/google-cloud-bigquery-analyticshub/google/cloud/bigquery_analyticshub_v1/types/pubsub.py @@ -175,6 +175,13 @@ class PubSubSubscription(proto.Message): messages before they are delivered to subscribers. Transforms are applied in the order specified. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag + keys/values directly bound to this resource. For + example: + + "123/environment": "production", + "123/costCenter": "marketing". """ name: str = proto.Field( @@ -250,6 +257,11 @@ class PubSubSubscription(proto.Message): number=25, message="MessageTransform", ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=26, + ) class RetryPolicy(proto.Message): diff --git a/packages/google-cloud-bigquery-analyticshub/noxfile.py b/packages/google-cloud-bigquery-analyticshub/noxfile.py index 944d89c0c1f3..fca276edf046 100644 --- a/packages/google-cloud-bigquery-analyticshub/noxfile.py +++ b/packages/google-cloud-bigquery-analyticshub/noxfile.py @@ -27,6 +27,10 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +# Add samples to the list of directories to format if the directory exists. 
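The proto changes above add Listing.stored_procedure_config backed by the new StoredProcedureConfig message, plus a tags map on PubSubSubscription. A sketch of constructing these messages, assuming both types are exported from the package root as the __init__ updates indicate; field values are placeholders.

    from google.cloud import bigquery_analyticshub_v1

    # Opt a listing's linked dataset into stored procedure sharing.
    listing = bigquery_analyticshub_v1.Listing(
        display_name="display_name_value",
        stored_procedure_config=bigquery_analyticshub_v1.StoredProcedureConfig(
            enabled=True,
            # allowed_stored_procedure_types is output only; the service populates it.
        ),
    )

    # Tags are a plain string-to-string map bound to the subscription resource.
    subscription = bigquery_analyticshub_v1.PubSubSubscription(
        name="projects/sample-project/subscriptions/sample-subscription",
        tags={"123/environment": "production"},
    )
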
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_async.py index 60cc0009e644..03d251d40e8e 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_async.py @@ -49,4 +49,5 @@ async def sample_approve_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ApproveQueryTemplate_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_sync.py index bdfab9e2adf0..6597802c5e13 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_approve_query_template_sync.py @@ -49,4 +49,5 @@ def sample_approve_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ApproveQueryTemplate_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py index f1a84ecf0ee0..1ca972897926 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_async.py @@ -54,4 +54,5 @@ async def sample_create_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py index 6332f53a5d4a..5344bc3e398d 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_data_exchange_sync.py @@ -54,4 +54,5 @@ def sample_create_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_CreateDataExchange_sync] diff --git 
a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_async.py index f2ba1336d296..e949ff4e4fdd 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_async.py @@ -54,4 +54,5 @@ async def sample_create_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_CreateListing_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py index e9e38f795eee..1e14bdc79ef1 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_listing_sync.py @@ -54,4 +54,5 @@ def sample_create_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_CreateListing_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_async.py index 98d465afc10e..af8e36edb37b 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_async.py @@ -54,4 +54,5 @@ async def sample_create_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_CreateQueryTemplate_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_sync.py index 94bb9c36c345..0fc850121897 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_create_query_template_sync.py @@ -54,4 +54,5 @@ def sample_create_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_CreateQueryTemplate_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py 
b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py index 699eebcc5e63..79fd52078610 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_async.py @@ -53,4 +53,5 @@ async def sample_delete_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py index eb140afe157b..1db317a26725 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_delete_subscription_sync.py @@ -53,4 +53,5 @@ def sample_delete_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_DeleteSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py index 950789ba91b1..10d3cad92a0f 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_async.py @@ -49,4 +49,5 @@ async def sample_get_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py index b369b8aa8fc6..49492560b07d 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_data_exchange_sync.py @@ -49,4 +49,5 @@ def sample_get_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetDataExchange_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py index f6318c01cbd2..2bbd3eb0e74c 100644 --- 
a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_async.py @@ -23,6 +23,8 @@ # python3 -m pip install google-cloud-bigquery-analyticshub +from google.iam.v1 import iam_policy_pb2 # type: ignore + # [START analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_async] # This snippet has been automatically generated and should be regarded as a # code template only. @@ -32,7 +34,6 @@ # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore async def sample_get_iam_policy(): @@ -50,4 +51,5 @@ async def sample_get_iam_policy(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py index bf8edb049624..5e782f64e0cb 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_iam_policy_sync.py @@ -23,6 +23,8 @@ # python3 -m pip install google-cloud-bigquery-analyticshub +from google.iam.v1 import iam_policy_pb2 # type: ignore + # [START analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
@@ -32,7 +34,6 @@ # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore def sample_get_iam_policy(): @@ -50,4 +51,5 @@ def sample_get_iam_policy(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetIamPolicy_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_async.py index ff29ccb83fe4..98e75708f4cd 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_async.py @@ -49,4 +49,5 @@ async def sample_get_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetListing_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py index a054d17a5176..17662aedae56 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_listing_sync.py @@ -49,4 +49,5 @@ def sample_get_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetListing_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_async.py index 7daf395c88eb..40ca39ba02a2 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_async.py @@ -49,4 +49,5 @@ async def sample_get_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetQueryTemplate_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_sync.py index 169b8b97af78..e5819133606b 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_query_template_sync.py @@ -49,4 +49,5 @@ def sample_get_query_template(): # Handle the response print(response) + # [END 
analyticshub_v1_generated_AnalyticsHubService_GetQueryTemplate_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py index a2ef84636875..1b92c60bba9e 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_async.py @@ -49,4 +49,5 @@ async def sample_get_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py index aaf45d726c4f..603441da7993 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_get_subscription_sync.py @@ -49,4 +49,5 @@ def sample_get_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_GetSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py index 933ebd39d3b6..3f4bd50f6ffb 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_async.py @@ -50,4 +50,5 @@ async def sample_list_data_exchanges(): async for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py index 118a6720c22b..195a6f03a65b 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_data_exchanges_sync.py @@ -50,4 +50,5 @@ def sample_list_data_exchanges(): for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListDataExchanges_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_async.py 
b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_async.py index f920c98e6fed..8bae97e2a75d 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_async.py @@ -50,4 +50,5 @@ async def sample_list_listings(): async for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListListings_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py index 87562c0127d8..57bee85c5a96 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_listings_sync.py @@ -50,4 +50,5 @@ def sample_list_listings(): for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListListings_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py index c767dac60936..e6292063fcd0 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_async.py @@ -50,4 +50,5 @@ async def sample_list_org_data_exchanges(): async for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py index 5847843ae958..f6b0b1a65d25 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_org_data_exchanges_sync.py @@ -50,4 +50,5 @@ def sample_list_org_data_exchanges(): for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListOrgDataExchanges_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_async.py index 632999e38916..1f86d7c85135 100644 --- 
a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_async.py @@ -50,4 +50,5 @@ async def sample_list_query_templates(): async for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListQueryTemplates_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_sync.py index a91433572f04..f58ddbd06082 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_query_templates_sync.py @@ -50,4 +50,5 @@ def sample_list_query_templates(): for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListQueryTemplates_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py index 2d6069e4a053..f35cd27999e6 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_async.py @@ -50,4 +50,5 @@ async def sample_list_shared_resource_subscriptions(): async for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py index 63b1c9bb2f6b..64fdf32e5e16 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_shared_resource_subscriptions_sync.py @@ -50,4 +50,5 @@ def sample_list_shared_resource_subscriptions(): for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListSharedResourceSubscriptions_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py index d7d1ce0657d7..719b8b7cad99 100644 --- 
a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_async.py @@ -50,4 +50,5 @@ async def sample_list_subscriptions(): async for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py index 01158a3aca9b..4164388d71a5 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_list_subscriptions_sync.py @@ -50,4 +50,5 @@ def sample_list_subscriptions(): for response in page_result: print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_ListSubscriptions_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py index 2b959aeeeed9..39ca2f6bdc45 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_async.py @@ -53,4 +53,5 @@ async def sample_refresh_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py index 927da10e34c1..e353e3cd7660 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_refresh_subscription_sync.py @@ -53,4 +53,5 @@ def sample_refresh_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_RefreshSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py index 138d95d6c0df..ae01884faecb 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py +++ 
b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_async.py @@ -49,4 +49,5 @@ async def sample_revoke_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py index cbc814c367b1..1b406cdb7267 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_revoke_subscription_sync.py @@ -49,4 +49,5 @@ def sample_revoke_subscription(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_RevokeSubscription_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py index 9b0b1985dcf5..4e9741efe69a 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_async.py @@ -23,6 +23,8 @@ # python3 -m pip install google-cloud-bigquery-analyticshub +from google.iam.v1 import iam_policy_pb2 # type: ignore + # [START analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_async] # This snippet has been automatically generated and should be regarded as a # code template only. @@ -32,7 +34,6 @@ # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore async def sample_set_iam_policy(): @@ -50,4 +51,5 @@ async def sample_set_iam_policy(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py index 138970ebaa09..ccae9ffef9a7 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_set_iam_policy_sync.py @@ -23,6 +23,8 @@ # python3 -m pip install google-cloud-bigquery-analyticshub +from google.iam.v1 import iam_policy_pb2 # type: ignore + # [START analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_sync] # This snippet has been automatically generated and should be regarded as a # code template only. 
@@ -32,7 +34,6 @@ # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore def sample_set_iam_policy(): @@ -50,4 +51,5 @@ def sample_set_iam_policy(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SetIamPolicy_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_async.py index bcc530a2577b..03a6cada9c0c 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_async.py @@ -49,4 +49,5 @@ async def sample_submit_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SubmitQueryTemplate_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_sync.py index 2b705295e949..eca6694c7476 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_submit_query_template_sync.py @@ -49,4 +49,5 @@ def sample_submit_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SubmitQueryTemplate_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py index 9f7332636232..c096f494ea4d 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_async.py @@ -55,4 +55,5 @@ async def sample_subscribe_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py index 9ac5e34e5bb2..26fccd7f9879 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py +++ 
b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_data_exchange_sync.py @@ -55,4 +55,5 @@ def sample_subscribe_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SubscribeDataExchange_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py index 52ad89bd627c..ca22bd8e1003 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_async.py @@ -55,4 +55,5 @@ async def sample_subscribe_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py index cbbd2272024c..5711da14bfda 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_subscribe_listing_sync.py @@ -55,4 +55,5 @@ def sample_subscribe_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_SubscribeListing_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py index ad290a9c79c0..243c5387ce7d 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_async.py @@ -23,6 +23,8 @@ # python3 -m pip install google-cloud-bigquery-analyticshub +from google.iam.v1 import iam_policy_pb2 # type: ignore + # [START analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_async] # This snippet has been automatically generated and should be regarded as a # code template only. 
@@ -32,7 +34,6 @@ # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore async def sample_test_iam_permissions(): @@ -42,7 +43,7 @@ async def sample_test_iam_permissions(): # Initialize request argument(s) request = iam_policy_pb2.TestIamPermissionsRequest( resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], + permissions=["permissions_value1", "permissions_value2"], ) # Make the request @@ -51,4 +52,5 @@ async def sample_test_iam_permissions(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py index ada1dbe5cefe..3aa0de2a6465 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_test_iam_permissions_sync.py @@ -23,6 +23,8 @@ # python3 -m pip install google-cloud-bigquery-analyticshub +from google.iam.v1 import iam_policy_pb2 # type: ignore + # [START analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_sync] # This snippet has been automatically generated and should be regarded as a # code template only. @@ -32,7 +34,6 @@ # client as shown in: # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import bigquery_analyticshub_v1 -from google.iam.v1 import iam_policy_pb2 # type: ignore def sample_test_iam_permissions(): @@ -42,7 +43,7 @@ def sample_test_iam_permissions(): # Initialize request argument(s) request = iam_policy_pb2.TestIamPermissionsRequest( resource="resource_value", - permissions=['permissions_value1', 'permissions_value2'], + permissions=["permissions_value1", "permissions_value2"], ) # Make the request @@ -51,4 +52,5 @@ def sample_test_iam_permissions(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_TestIamPermissions_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py index 61f61670971c..2602c2843e83 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_async.py @@ -52,4 +52,5 @@ async def sample_update_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py 
b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py index 8ee2d71d9feb..db40ae6ebc0d 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_data_exchange_sync.py @@ -52,4 +52,5 @@ def sample_update_data_exchange(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_UpdateDataExchange_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_async.py index 7c4c3a2859a8..ffb7d007e929 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_async.py @@ -52,4 +52,5 @@ async def sample_update_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_UpdateListing_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py index 190383d1fced..7ec9df9000d9 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_listing_sync.py @@ -52,4 +52,5 @@ def sample_update_listing(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_UpdateListing_sync] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_async.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_async.py index d0ffdf7bf5c8..9bb9dc91fb7c 100644 --- a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_async.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_async.py @@ -52,4 +52,5 @@ async def sample_update_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_UpdateQueryTemplate_async] diff --git a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_sync.py b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_sync.py index 4dc20fdeeab0..484415d4a2db 100644 --- 
a/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_sync.py +++ b/packages/google-cloud-bigquery-analyticshub/samples/generated_samples/analyticshub_v1_generated_analytics_hub_service_update_query_template_sync.py @@ -52,4 +52,5 @@ def sample_update_query_template(): # Handle the response print(response) + # [END analyticshub_v1_generated_AnalyticsHubService_UpdateQueryTemplate_sync] diff --git a/packages/google-cloud-cloudsecuritycompliance/docs/cloudsecuritycompliance_v1/monitoring.rst b/packages/google-cloud-cloudsecuritycompliance/docs/cloudsecuritycompliance_v1/monitoring.rst new file mode 100644 index 000000000000..4d9f179eb398 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/docs/cloudsecuritycompliance_v1/monitoring.rst @@ -0,0 +1,10 @@ +Monitoring +---------------------------- + +.. automodule:: google.cloud.cloudsecuritycompliance_v1.services.monitoring + :members: + :inherited-members: + +.. automodule:: google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-cloudsecuritycompliance/docs/cloudsecuritycompliance_v1/services_.rst b/packages/google-cloud-cloudsecuritycompliance/docs/cloudsecuritycompliance_v1/services_.rst index 80065d89c786..4b9aa6cfeb0c 100644 --- a/packages/google-cloud-cloudsecuritycompliance/docs/cloudsecuritycompliance_v1/services_.rst +++ b/packages/google-cloud-cloudsecuritycompliance/docs/cloudsecuritycompliance_v1/services_.rst @@ -7,3 +7,4 @@ Services for Google Cloud Cloudsecuritycompliance v1 API cm_enrollment_service config deployment + monitoring diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py index 302f8b8c6666..b4c89c91615c 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py @@ -38,6 +38,12 @@ from google.cloud.cloudsecuritycompliance_v1.services.deployment.client import ( DeploymentClient, ) +from google.cloud.cloudsecuritycompliance_v1.services.monitoring.async_client import ( + MonitoringAsyncClient, +) +from google.cloud.cloudsecuritycompliance_v1.services.monitoring.client import ( + MonitoringClient, +) from google.cloud.cloudsecuritycompliance_v1.types.audit import ( BucketDestination, CloudControlAuditDetails, @@ -126,6 +132,30 @@ TargetResourceConfig, TargetResourceCreationConfig, ) +from google.cloud.cloudsecuritycompliance_v1.types.monitoring import ( + AggregatedComplianceReport, + AggregateFrameworkComplianceReportRequest, + AggregateFrameworkComplianceReportResponse, + CloudControlAssessmentDetails, + CloudControlReport, + ControlAssessmentDetails, + ControlComplianceSummary, + EvaluationState, + FetchFrameworkComplianceReportRequest, + FindingClass, + FindingSummary, + FrameworkComplianceReport, + FrameworkComplianceSummary, + ListControlComplianceSummariesRequest, + ListControlComplianceSummariesResponse, + ListFindingSummariesRequest, + ListFindingSummariesResponse, + ListFrameworkComplianceSummariesRequest, + ListFrameworkComplianceSummariesResponse, + ManualCloudControlAssessmentDetails, + SimilarControls, + TargetResourceDetails, +) __all__ = ( "AuditClient", @@ -136,6 +166,8 @@ "ConfigAsyncClient", "DeploymentClient", 
"DeploymentAsyncClient", + "MonitoringClient", + "MonitoringAsyncClient", "BucketDestination", "CloudControlAuditDetails", "CloudControlGroupAuditDetails", @@ -214,4 +246,26 @@ "TargetResourceConfig", "TargetResourceCreationConfig", "DeploymentState", + "AggregatedComplianceReport", + "AggregateFrameworkComplianceReportRequest", + "AggregateFrameworkComplianceReportResponse", + "CloudControlAssessmentDetails", + "CloudControlReport", + "ControlAssessmentDetails", + "ControlComplianceSummary", + "FetchFrameworkComplianceReportRequest", + "FindingSummary", + "FrameworkComplianceReport", + "FrameworkComplianceSummary", + "ListControlComplianceSummariesRequest", + "ListControlComplianceSummariesResponse", + "ListFindingSummariesRequest", + "ListFindingSummariesResponse", + "ListFrameworkComplianceSummariesRequest", + "ListFrameworkComplianceSummariesResponse", + "ManualCloudControlAssessmentDetails", + "SimilarControls", + "TargetResourceDetails", + "EvaluationState", + "FindingClass", ) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py index 60e3e13d2a7e..87e375596324 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py @@ -25,6 +25,7 @@ ) from .services.config import ConfigAsyncClient, ConfigClient from .services.deployment import DeploymentAsyncClient, DeploymentClient +from .services.monitoring import MonitoringAsyncClient, MonitoringClient from .types.audit import ( BucketDestination, CloudControlAuditDetails, @@ -113,12 +114,40 @@ TargetResourceConfig, TargetResourceCreationConfig, ) +from .types.monitoring import ( + AggregatedComplianceReport, + AggregateFrameworkComplianceReportRequest, + AggregateFrameworkComplianceReportResponse, + CloudControlAssessmentDetails, + CloudControlReport, + ControlAssessmentDetails, + ControlComplianceSummary, + EvaluationState, + FetchFrameworkComplianceReportRequest, + FindingClass, + FindingSummary, + FrameworkComplianceReport, + FrameworkComplianceSummary, + ListControlComplianceSummariesRequest, + ListControlComplianceSummariesResponse, + ListFindingSummariesRequest, + ListFindingSummariesResponse, + ListFrameworkComplianceSummariesRequest, + ListFrameworkComplianceSummariesResponse, + ManualCloudControlAssessmentDetails, + SimilarControls, + TargetResourceDetails, +) __all__ = ( "AuditAsyncClient", "CmEnrollmentServiceAsyncClient", "ConfigAsyncClient", "DeploymentAsyncClient", + "MonitoringAsyncClient", + "AggregateFrameworkComplianceReportRequest", + "AggregateFrameworkComplianceReportResponse", + "AggregatedComplianceReport", "AllowedValues", "AttributeSubstitutionRule", "AuditClient", @@ -128,6 +157,7 @@ "CalculateEffectiveCmEnrollmentRequest", "CalculateEffectiveCmEnrollmentResponse", "CloudControl", + "CloudControlAssessmentDetails", "CloudControlAuditDetails", "CloudControlCategory", "CloudControlDeployment", @@ -135,11 +165,14 @@ "CloudControlDetails", "CloudControlGroupAuditDetails", "CloudControlMetadata", + "CloudControlReport", "CloudProvider", "CmEnrollment", "CmEnrollmentServiceClient", "ComplianceState", "ConfigClient", + "ControlAssessmentDetails", + "ControlComplianceSummary", "ControlFamily", "CreateCloudControlRequest", "CreateFrameworkAuditRequest", @@ -151,13 +184,19 @@ "DeploymentClient", "DeploymentState", 
"EnforcementMode", + "EvaluationState", "EvidenceDetails", + "FetchFrameworkComplianceReportRequest", + "FindingClass", "FindingDetails", + "FindingSummary", "FolderCreationConfig", "Framework", "FrameworkAudit", "FrameworkAuditDestination", "FrameworkCategory", + "FrameworkComplianceReport", + "FrameworkComplianceSummary", "FrameworkDeployment", "FrameworkDeploymentReference", "FrameworkReference", @@ -173,12 +212,20 @@ "ListCloudControlDeploymentsResponse", "ListCloudControlsRequest", "ListCloudControlsResponse", + "ListControlComplianceSummariesRequest", + "ListControlComplianceSummariesResponse", + "ListFindingSummariesRequest", + "ListFindingSummariesResponse", "ListFrameworkAuditsRequest", "ListFrameworkAuditsResponse", + "ListFrameworkComplianceSummariesRequest", + "ListFrameworkComplianceSummariesResponse", "ListFrameworkDeploymentsRequest", "ListFrameworkDeploymentsResponse", "ListFrameworksRequest", "ListFrameworksResponse", + "ManualCloudControlAssessmentDetails", + "MonitoringClient", "ObservationDetails", "OperationMetadata", "ParamValue", @@ -193,9 +240,11 @@ "Rule", "RuleActionType", "Severity", + "SimilarControls", "StringList", "TargetResourceConfig", "TargetResourceCreationConfig", + "TargetResourceDetails", "TargetResourceType", "UpdateCloudControlRequest", "UpdateCmEnrollmentRequest", diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/gapic_metadata.json b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/gapic_metadata.json index cda696c6068c..91c3d80353af 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/gapic_metadata.json +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/gapic_metadata.json @@ -410,6 +410,100 @@ } } } + }, + "Monitoring": { + "clients": { + "grpc": { + "libraryClient": "MonitoringClient", + "rpcs": { + "AggregateFrameworkComplianceReport": { + "methods": [ + "aggregate_framework_compliance_report" + ] + }, + "FetchFrameworkComplianceReport": { + "methods": [ + "fetch_framework_compliance_report" + ] + }, + "ListControlComplianceSummaries": { + "methods": [ + "list_control_compliance_summaries" + ] + }, + "ListFindingSummaries": { + "methods": [ + "list_finding_summaries" + ] + }, + "ListFrameworkComplianceSummaries": { + "methods": [ + "list_framework_compliance_summaries" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MonitoringAsyncClient", + "rpcs": { + "AggregateFrameworkComplianceReport": { + "methods": [ + "aggregate_framework_compliance_report" + ] + }, + "FetchFrameworkComplianceReport": { + "methods": [ + "fetch_framework_compliance_report" + ] + }, + "ListControlComplianceSummaries": { + "methods": [ + "list_control_compliance_summaries" + ] + }, + "ListFindingSummaries": { + "methods": [ + "list_finding_summaries" + ] + }, + "ListFrameworkComplianceSummaries": { + "methods": [ + "list_framework_compliance_summaries" + ] + } + } + }, + "rest": { + "libraryClient": "MonitoringClient", + "rpcs": { + "AggregateFrameworkComplianceReport": { + "methods": [ + "aggregate_framework_compliance_report" + ] + }, + "FetchFrameworkComplianceReport": { + "methods": [ + "fetch_framework_compliance_report" + ] + }, + "ListControlComplianceSummaries": { + "methods": [ + "list_control_compliance_summaries" + ] + }, + "ListFindingSummaries": { + "methods": [ + "list_finding_summaries" + ] + }, + "ListFrameworkComplianceSummaries": { + "methods": [ + 
"list_framework_compliance_summaries" + ] + } + } + } + } } } } diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/__init__.py new file mode 100644 index 000000000000..e7c72c30d128 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import MonitoringAsyncClient +from .client import MonitoringClient + +__all__ = ( + "MonitoringClient", + "MonitoringAsyncClient", +) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/async_client.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/async_client.py new file mode 100644 index 000000000000..915f2ac94bea --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/async_client.py @@ -0,0 +1,1270 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.cloudsecuritycompliance_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.cloudsecuritycompliance_v1.services.monitoring import pagers +from google.cloud.cloudsecuritycompliance_v1.types import common, monitoring + +from .client import MonitoringClient +from .transports.base import DEFAULT_CLIENT_INFO, MonitoringTransport +from .transports.grpc_asyncio import MonitoringGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class MonitoringAsyncClient: + """Service describing handlers for resources""" + + _client: MonitoringClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = MonitoringClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MonitoringClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = MonitoringClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = MonitoringClient._DEFAULT_UNIVERSE + + control_compliance_summary_path = staticmethod( + MonitoringClient.control_compliance_summary_path + ) + parse_control_compliance_summary_path = staticmethod( + MonitoringClient.parse_control_compliance_summary_path + ) + finding_summary_path = staticmethod(MonitoringClient.finding_summary_path) + parse_finding_summary_path = staticmethod( + MonitoringClient.parse_finding_summary_path + ) + framework_compliance_report_path = staticmethod( + MonitoringClient.framework_compliance_report_path + ) + parse_framework_compliance_report_path = staticmethod( + MonitoringClient.parse_framework_compliance_report_path + ) + framework_compliance_summary_path = staticmethod( + MonitoringClient.framework_compliance_summary_path + ) + parse_framework_compliance_summary_path = staticmethod( + MonitoringClient.parse_framework_compliance_summary_path + ) + common_billing_account_path = staticmethod( + MonitoringClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + MonitoringClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(MonitoringClient.common_folder_path) + parse_common_folder_path = staticmethod(MonitoringClient.parse_common_folder_path) + common_organization_path = staticmethod(MonitoringClient.common_organization_path) + parse_common_organization_path = staticmethod( + MonitoringClient.parse_common_organization_path + ) + common_project_path = staticmethod(MonitoringClient.common_project_path) + parse_common_project_path = staticmethod(MonitoringClient.parse_common_project_path) + common_location_path = staticmethod(MonitoringClient.common_location_path) + parse_common_location_path = staticmethod( + MonitoringClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MonitoringAsyncClient: The constructed client. + """ + return MonitoringClient.from_service_account_info.__func__(MonitoringAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MonitoringAsyncClient: The constructed client. + """ + return MonitoringClient.from_service_account_file.__func__(MonitoringAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MonitoringClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MonitoringTransport: + """Returns the transport used by the client instance. + + Returns: + MonitoringTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = MonitoringClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MonitoringTransport, Callable[..., MonitoringTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the monitoring async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MonitoringTransport,Callable[..., MonitoringTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MonitoringTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided.
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MonitoringClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient`.", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "credentialsType": None, + }, + ) + + async def list_framework_compliance_summaries( + self, + request: Optional[ + Union[monitoring.ListFrameworkComplianceSummariesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFrameworkComplianceSummariesAsyncPager: + r"""Lists the framework compliance summary for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + async def sample_list_framework_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFrameworkComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_framework_compliance_summaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesRequest, dict]]): + The request object. The request message for + [ListFrameworkComplianceSummariesRequest][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesRequest]. + parent (:class:`str`): + Required. The parent scope for the + framework compliance summary. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFrameworkComplianceSummariesAsyncPager: + The response message for + [ListFrameworkComplianceSummariesResponse][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesResponse]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, monitoring.ListFrameworkComplianceSummariesRequest): + request = monitoring.ListFrameworkComplianceSummariesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_framework_compliance_summaries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListFrameworkComplianceSummariesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_finding_summaries( + self, + request: Optional[Union[monitoring.ListFindingSummariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFindingSummariesAsyncPager: + r"""Lists the finding summary by category for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + async def sample_list_finding_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFindingSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_finding_summaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesRequest, dict]]): + The request object. The request message for [ListFindingSummaries][]. + parent (:class:`str`): + Required. The parent scope for the + framework overview page. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFindingSummariesAsyncPager: + The response message for [ListFindingSummaries][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, monitoring.ListFindingSummariesRequest): + request = monitoring.ListFindingSummariesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_finding_summaries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListFindingSummariesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_framework_compliance_report( + self, + request: Optional[ + Union[monitoring.FetchFrameworkComplianceReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.FrameworkComplianceReport: + r"""Fetches the framework compliance report for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + async def sample_fetch_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.FetchFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = await client.fetch_framework_compliance_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.FetchFrameworkComplianceReportRequest, dict]]): + The request object. The request message for + [FetchFrameworkComplianceReport][]. + name (:class:`str`): + Required. The name of the framework + compliance report to retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.cloudsecuritycompliance_v1.types.FrameworkComplianceReport: + The response message for + [GetFrameworkComplianceReport][]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, monitoring.FetchFrameworkComplianceReportRequest): + request = monitoring.FetchFrameworkComplianceReportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_framework_compliance_report + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_control_compliance_summaries( + self, + request: Optional[ + Union[monitoring.ListControlComplianceSummariesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListControlComplianceSummariesAsyncPager: + r"""Lists the control compliance summary for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + async def sample_list_control_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListControlComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_control_compliance_summaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesRequest, dict]]): + The request object. The request message for + [ListControlComplianceSummaries][]. + parent (:class:`str`): + Required. The parent scope for the + framework overview page. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListControlComplianceSummariesAsyncPager: + The response message for + [ListControlComplianceSummaries][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, monitoring.ListControlComplianceSummariesRequest): + request = monitoring.ListControlComplianceSummariesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_control_compliance_summaries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListControlComplianceSummariesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def aggregate_framework_compliance_report( + self, + request: Optional[ + Union[monitoring.AggregateFrameworkComplianceReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.AggregateFrameworkComplianceReportResponse: + r"""Gets the aggregated compliance report over time for a + given scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + async def sample_aggregate_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.AggregateFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = await client.aggregate_framework_compliance_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportRequest, dict]]): + The request object. The request message for + [AggregateFrameworkComplianceReport][]. + name (:class:`str`): + Required. The name of the aggregated compliance report + over time to retrieve. + + The supported format is: + ``organizations/{organization_id}/locations/{location}/frameworkComplianceReports/{framework_compliance_report}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportResponse: + The response message for + [AggregateFrameworkComplianceReport][]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, monitoring.AggregateFrameworkComplianceReportRequest + ): + request = monitoring.AggregateFrameworkComplianceReportRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.aggregate_framework_compliance_report + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "MonitoringAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("MonitoringAsyncClient",) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/client.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/client.py new file mode 100644 index 000000000000..ae294197466c --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/client.py @@ -0,0 +1,1757 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.cloudsecuritycompliance_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.cloudsecuritycompliance_v1.services.monitoring import pagers +from google.cloud.cloudsecuritycompliance_v1.types import common, monitoring + +from .transports.base import DEFAULT_CLIENT_INFO, MonitoringTransport +from .transports.grpc import MonitoringGrpcTransport +from .transports.grpc_asyncio import MonitoringGrpcAsyncIOTransport +from .transports.rest import MonitoringRestTransport + + +class MonitoringClientMeta(type): + """Metaclass for the Monitoring client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[MonitoringTransport]] + _transport_registry["grpc"] = MonitoringGrpcTransport + _transport_registry["grpc_asyncio"] = MonitoringGrpcAsyncIOTransport + _transport_registry["rest"] = MonitoringRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[MonitoringTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MonitoringClient(metaclass=MonitoringClientMeta): + """Service describing handlers for resources""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. 
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "cloudsecuritycompliance.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "cloudsecuritycompliance.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MonitoringClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MonitoringClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MonitoringTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MonitoringTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def control_compliance_summary_path( + project: str, + location: str, + framework_compliance_report: str, + control_compliance_summary: str, + ) -> str: + """Returns a fully-qualified control_compliance_summary string.""" + return "projects/{project}/locations/{location}/frameworkComplianceReports/{framework_compliance_report}/controlComplianceSummaries/{control_compliance_summary}".format( + project=project, + location=location, + framework_compliance_report=framework_compliance_report, + control_compliance_summary=control_compliance_summary, + ) + + @staticmethod + def parse_control_compliance_summary_path(path: str) -> Dict[str, str]: + """Parses a control_compliance_summary path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/frameworkComplianceReports/(?P.+?)/controlComplianceSummaries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def finding_summary_path( + project: str, + location: str, + finding_summary: str, + ) -> str: + """Returns a fully-qualified finding_summary string.""" + return "projects/{project}/locations/{location}/findingSummaries/{finding_summary}".format( + project=project, + location=location, + finding_summary=finding_summary, + ) + + @staticmethod + def parse_finding_summary_path(path: str) -> Dict[str, str]: + """Parses a finding_summary path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/findingSummaries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def framework_compliance_report_path( + project: str, + location: str, + framework_compliance_report: str, + ) -> str: + """Returns a fully-qualified framework_compliance_report string.""" + return "projects/{project}/locations/{location}/frameworkComplianceReports/{framework_compliance_report}".format( + project=project, + location=location, + framework_compliance_report=framework_compliance_report, + ) + + @staticmethod + def parse_framework_compliance_report_path(path: str) -> Dict[str, str]: + """Parses a framework_compliance_report path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/frameworkComplianceReports/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def framework_compliance_summary_path( + project: str, + location: str, + framework_compliance_summary: str, + ) -> str: + """Returns a fully-qualified framework_compliance_summary string.""" + return "projects/{project}/locations/{location}/frameworkComplianceSummaries/{framework_compliance_summary}".format( + project=project, + location=location, + framework_compliance_summary=framework_compliance_summary, + ) + + @staticmethod + def parse_framework_compliance_summary_path(path: str) -> Dict[str, str]: + """Parses a framework_compliance_summary path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/frameworkComplianceSummaries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} 
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated.
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. 
+ client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = MonitoringClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = MonitoringClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MonitoringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MonitoringClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
+ """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, MonitoringTransport, Callable[..., MonitoringTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the monitoring client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MonitoringTransport,Callable[..., MonitoringTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MonitoringTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = MonitoringClient._read_environment_variables() + self._client_cert_source = MonitoringClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = MonitoringClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, MonitoringTransport) + if transport_provided: + # transport is a MonitoringTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(MonitoringTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or MonitoringClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[MonitoringTransport], Callable[..., MonitoringTransport] + ] = ( + MonitoringClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MonitoringTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.cloudsecuritycompliance_v1.MonitoringClient`.", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "credentialsType": None, + }, + ) + + def list_framework_compliance_summaries( + self, + request: Optional[ + Union[monitoring.ListFrameworkComplianceSummariesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFrameworkComplianceSummariesPager: + r"""Lists the framework compliance summary for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + def sample_list_framework_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFrameworkComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_framework_compliance_summaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesRequest, dict]): + The request object. The request message for + [ListFrameworkComplianceSummariesRequest][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesRequest]. + parent (str): + Required. The parent scope for the + framework compliance summary. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFrameworkComplianceSummariesPager: + The response message for + [ListFrameworkComplianceSummariesResponse][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesResponse]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, monitoring.ListFrameworkComplianceSummariesRequest): + request = monitoring.ListFrameworkComplianceSummariesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_framework_compliance_summaries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListFrameworkComplianceSummariesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_finding_summaries( + self, + request: Optional[Union[monitoring.ListFindingSummariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListFindingSummariesPager: + r"""Lists the finding summary by category for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + def sample_list_finding_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFindingSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_finding_summaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesRequest, dict]): + The request object. The request message for [ListFindingSummaries][]. + parent (str): + Required. The parent scope for the + framework overview page. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFindingSummariesPager: + The response message for [ListFindingSummaries][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, monitoring.ListFindingSummariesRequest): + request = monitoring.ListFindingSummariesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_finding_summaries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListFindingSummariesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_framework_compliance_report( + self, + request: Optional[ + Union[monitoring.FetchFrameworkComplianceReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.FrameworkComplianceReport: + r"""Fetches the framework compliance report for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + def sample_fetch_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.FetchFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = client.fetch_framework_compliance_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudsecuritycompliance_v1.types.FetchFrameworkComplianceReportRequest, dict]): + The request object. The request message for + [FetchFrameworkComplianceReport][]. + name (str): + Required. The name of the framework + compliance report to retrieve. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.types.FrameworkComplianceReport: + The response message for + [GetFrameworkComplianceReport][]. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, monitoring.FetchFrameworkComplianceReportRequest): + request = monitoring.FetchFrameworkComplianceReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_framework_compliance_report + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_control_compliance_summaries( + self, + request: Optional[ + Union[monitoring.ListControlComplianceSummariesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListControlComplianceSummariesPager: + r"""Lists the control compliance summary for a given + scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + def sample_list_control_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListControlComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_control_compliance_summaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesRequest, dict]): + The request object. The request message for + [ListControlComplianceSummaries][]. + parent (str): + Required. The parent scope for the + framework overview page. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListControlComplianceSummariesPager: + The response message for + [ListControlComplianceSummaries][]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, monitoring.ListControlComplianceSummariesRequest): + request = monitoring.ListControlComplianceSummariesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_control_compliance_summaries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListControlComplianceSummariesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def aggregate_framework_compliance_report( + self, + request: Optional[ + Union[monitoring.AggregateFrameworkComplianceReportRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.AggregateFrameworkComplianceReportResponse: + r"""Gets the aggregated compliance report over time for a + given scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import cloudsecuritycompliance_v1 + + def sample_aggregate_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.AggregateFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = client.aggregate_framework_compliance_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportRequest, dict]): + The request object. The request message for + [AggregateFrameworkComplianceReport][]. + name (str): + Required. The name of the aggregated compliance report + over time to retrieve. + + The supported format is: + ``organizations/{organization_id}/locations/{location}/frameworkComplianceReports/{framework_compliance_report}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportResponse: + The response message for + [AggregateFrameworkComplianceReport][]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, monitoring.AggregateFrameworkComplianceReportRequest + ): + request = monitoring.AggregateFrameworkComplianceReportRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.aggregate_framework_compliance_report + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "MonitoringClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("MonitoringClient",) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/pagers.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/pagers.py new file mode 100644 index 000000000000..ba0032ac4612 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/pagers.py @@ -0,0 +1,517 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.cloudsecuritycompliance_v1.types import monitoring + + +class ListFrameworkComplianceSummariesPager: + """A pager for iterating through ``list_framework_compliance_summaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``framework_compliance_summaries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListFrameworkComplianceSummaries`` requests and continue to iterate + through the ``framework_compliance_summaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., monitoring.ListFrameworkComplianceSummariesResponse], + request: monitoring.ListFrameworkComplianceSummariesRequest, + response: monitoring.ListFrameworkComplianceSummariesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesRequest): + The initial request object. + response (google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
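+
+        Example:
+            A short usage sketch; the pager is normally obtained from
+            ``MonitoringClient.list_framework_compliance_summaries`` rather
+            than constructed directly, and the ``parent`` value below is only
+            a placeholder.
+
+            .. code-block:: python
+
+                from google.cloud import cloudsecuritycompliance_v1
+
+                client = cloudsecuritycompliance_v1.MonitoringClient()
+                pager = client.list_framework_compliance_summaries(
+                    parent="organizations/123/locations/global",
+                )
+
+                # Iterating the pager yields individual summaries and fetches
+                # additional pages on demand.
+                for summary in pager:
+                    print(summary)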
+ """ + self._method = method + self._request = monitoring.ListFrameworkComplianceSummariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[monitoring.ListFrameworkComplianceSummariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[monitoring.FrameworkComplianceSummary]: + for page in self.pages: + yield from page.framework_compliance_summaries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFrameworkComplianceSummariesAsyncPager: + """A pager for iterating through ``list_framework_compliance_summaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``framework_compliance_summaries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFrameworkComplianceSummaries`` requests and continue to iterate + through the ``framework_compliance_summaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[monitoring.ListFrameworkComplianceSummariesResponse] + ], + request: monitoring.ListFrameworkComplianceSummariesRequest, + response: monitoring.ListFrameworkComplianceSummariesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesRequest): + The initial request object. + response (google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = monitoring.ListFrameworkComplianceSummariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[monitoring.ListFrameworkComplianceSummariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[monitoring.FrameworkComplianceSummary]: + async def async_generator(): + async for page in self.pages: + for response in page.framework_compliance_summaries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFindingSummariesPager: + """A pager for iterating through ``list_finding_summaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``finding_summaries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListFindingSummaries`` requests and continue to iterate + through the ``finding_summaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., monitoring.ListFindingSummariesResponse], + request: monitoring.ListFindingSummariesRequest, + response: monitoring.ListFindingSummariesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesRequest): + The initial request object. + response (google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = monitoring.ListFindingSummariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[monitoring.ListFindingSummariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[monitoring.FindingSummary]: + for page in self.pages: + yield from page.finding_summaries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListFindingSummariesAsyncPager: + """A pager for iterating through ``list_finding_summaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``finding_summaries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListFindingSummaries`` requests and continue to iterate + through the ``finding_summaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[monitoring.ListFindingSummariesResponse]], + request: monitoring.ListFindingSummariesRequest, + response: monitoring.ListFindingSummariesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesRequest): + The initial request object. + response (google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = monitoring.ListFindingSummariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[monitoring.ListFindingSummariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[monitoring.FindingSummary]: + async def async_generator(): + async for page in self.pages: + for response in page.finding_summaries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListControlComplianceSummariesPager: + """A pager for iterating through ``list_control_compliance_summaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``control_compliance_summaries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListControlComplianceSummaries`` requests and continue to iterate + through the ``control_compliance_summaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., monitoring.ListControlComplianceSummariesResponse], + request: monitoring.ListControlComplianceSummariesRequest, + response: monitoring.ListControlComplianceSummariesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesRequest): + The initial request object. + response (google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = monitoring.ListControlComplianceSummariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[monitoring.ListControlComplianceSummariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[monitoring.ControlComplianceSummary]: + for page in self.pages: + yield from page.control_compliance_summaries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListControlComplianceSummariesAsyncPager: + """A pager for iterating through ``list_control_compliance_summaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``control_compliance_summaries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListControlComplianceSummaries`` requests and continue to iterate + through the ``control_compliance_summaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[monitoring.ListControlComplianceSummariesResponse] + ], + request: monitoring.ListControlComplianceSummariesRequest, + response: monitoring.ListControlComplianceSummariesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesRequest): + The initial request object. + response (google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = monitoring.ListControlComplianceSummariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[monitoring.ListControlComplianceSummariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[monitoring.ControlComplianceSummary]: + async def async_generator(): + async for page in self.pages: + for response in page.control_compliance_summaries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/README.rst b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/README.rst new file mode 100644 index 000000000000..5c05c5143cb9 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MonitoringTransport` is the ABC for all transports. +- public child `MonitoringGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `MonitoringGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMonitoringRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MonitoringRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/__init__.py new file mode 100644 index 000000000000..9274f35fea83 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MonitoringTransport +from .grpc import MonitoringGrpcTransport +from .grpc_asyncio import MonitoringGrpcAsyncIOTransport +from .rest import MonitoringRestInterceptor, MonitoringRestTransport + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MonitoringTransport]] +_transport_registry["grpc"] = MonitoringGrpcTransport +_transport_registry["grpc_asyncio"] = MonitoringGrpcAsyncIOTransport +_transport_registry["rest"] = MonitoringRestTransport + +__all__ = ( + "MonitoringTransport", + "MonitoringGrpcTransport", + "MonitoringGrpcAsyncIOTransport", + "MonitoringRestTransport", + "MonitoringRestInterceptor", +) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/base.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/base.py new file mode 100644 index 000000000000..e16f6d086f2b --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/base.py @@ -0,0 +1,368 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.cloudsecuritycompliance_v1 import gapic_version as package_version +from google.cloud.cloudsecuritycompliance_v1.types import monitoring + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class MonitoringTransport(abc.ABC): + """Abstract transport class for Monitoring.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "cloudsecuritycompliance.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudsecuritycompliance.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_framework_compliance_summaries: gapic_v1.method.wrap_method( + self.list_framework_compliance_summaries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_finding_summaries: gapic_v1.method.wrap_method( + self.list_finding_summaries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_framework_compliance_report: gapic_v1.method.wrap_method( + self.fetch_framework_compliance_report, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_control_compliance_summaries: gapic_v1.method.wrap_method( + self.list_control_compliance_summaries, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.aggregate_framework_compliance_report: gapic_v1.method.wrap_method( + self.aggregate_framework_compliance_report, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def list_framework_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListFrameworkComplianceSummariesRequest], + Union[ + monitoring.ListFrameworkComplianceSummariesResponse, + Awaitable[monitoring.ListFrameworkComplianceSummariesResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_finding_summaries( + self, + ) -> Callable[ + [monitoring.ListFindingSummariesRequest], + Union[ + monitoring.ListFindingSummariesResponse, + Awaitable[monitoring.ListFindingSummariesResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.FetchFrameworkComplianceReportRequest], + Union[ + monitoring.FrameworkComplianceReport, + Awaitable[monitoring.FrameworkComplianceReport], + ], + ]: + raise NotImplementedError() + + @property + def list_control_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListControlComplianceSummariesRequest], + Union[ + monitoring.ListControlComplianceSummariesResponse, + Awaitable[monitoring.ListControlComplianceSummariesResponse], + ], + ]: + raise NotImplementedError() + + @property + def aggregate_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.AggregateFrameworkComplianceReportRequest], + Union[ + monitoring.AggregateFrameworkComplianceReportResponse, + Awaitable[monitoring.AggregateFrameworkComplianceReportResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("MonitoringTransport",) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/grpc.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/grpc.py new file mode 100644 index 000000000000..3ed517af1b0d --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/grpc.py @@ -0,0 +1,602 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.cloudsecuritycompliance_v1.types import monitoring + +from .base import DEFAULT_CLIENT_INFO, MonitoringTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": 
grpc_response["metadata"], + }, + ) + return response + + +class MonitoringGrpcTransport(MonitoringTransport): + """gRPC backend transport for Monitoring. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "cloudsecuritycompliance.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudsecuritycompliance.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "cloudsecuritycompliance.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. 
+ Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def list_framework_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListFrameworkComplianceSummariesRequest], + monitoring.ListFrameworkComplianceSummariesResponse, + ]: + r"""Return a callable for the list framework compliance + summaries method over gRPC. + + Lists the framework compliance summary for a given + scope. + + Returns: + Callable[[~.ListFrameworkComplianceSummariesRequest], + ~.ListFrameworkComplianceSummariesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_framework_compliance_summaries" not in self._stubs: + self._stubs[ + "list_framework_compliance_summaries" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/ListFrameworkComplianceSummaries", + request_serializer=monitoring.ListFrameworkComplianceSummariesRequest.serialize, + response_deserializer=monitoring.ListFrameworkComplianceSummariesResponse.deserialize, + ) + return self._stubs["list_framework_compliance_summaries"] + + @property + def list_finding_summaries( + self, + ) -> Callable[ + [monitoring.ListFindingSummariesRequest], + monitoring.ListFindingSummariesResponse, + ]: + r"""Return a callable for the list finding summaries method over gRPC. + + Lists the finding summary by category for a given + scope. + + Returns: + Callable[[~.ListFindingSummariesRequest], + ~.ListFindingSummariesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_finding_summaries" not in self._stubs: + self._stubs["list_finding_summaries"] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/ListFindingSummaries", + request_serializer=monitoring.ListFindingSummariesRequest.serialize, + response_deserializer=monitoring.ListFindingSummariesResponse.deserialize, + ) + return self._stubs["list_finding_summaries"] + + @property + def fetch_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.FetchFrameworkComplianceReportRequest], + monitoring.FrameworkComplianceReport, + ]: + r"""Return a callable for the fetch framework compliance + report method over gRPC. + + Fetches the framework compliance report for a given + scope. + + Returns: + Callable[[~.FetchFrameworkComplianceReportRequest], + ~.FrameworkComplianceReport]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_framework_compliance_report" not in self._stubs: + self._stubs[ + "fetch_framework_compliance_report" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/FetchFrameworkComplianceReport", + request_serializer=monitoring.FetchFrameworkComplianceReportRequest.serialize, + response_deserializer=monitoring.FrameworkComplianceReport.deserialize, + ) + return self._stubs["fetch_framework_compliance_report"] + + @property + def list_control_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListControlComplianceSummariesRequest], + monitoring.ListControlComplianceSummariesResponse, + ]: + r"""Return a callable for the list control compliance + summaries method over gRPC. + + Lists the control compliance summary for a given + scope. + + Returns: + Callable[[~.ListControlComplianceSummariesRequest], + ~.ListControlComplianceSummariesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_control_compliance_summaries" not in self._stubs: + self._stubs[ + "list_control_compliance_summaries" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/ListControlComplianceSummaries", + request_serializer=monitoring.ListControlComplianceSummariesRequest.serialize, + response_deserializer=monitoring.ListControlComplianceSummariesResponse.deserialize, + ) + return self._stubs["list_control_compliance_summaries"] + + @property + def aggregate_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.AggregateFrameworkComplianceReportRequest], + monitoring.AggregateFrameworkComplianceReportResponse, + ]: + r"""Return a callable for the aggregate framework compliance + report method over gRPC. + + Gets the aggregated compliance report over time for a + given scope. + + Returns: + Callable[[~.AggregateFrameworkComplianceReportRequest], + ~.AggregateFrameworkComplianceReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "aggregate_framework_compliance_report" not in self._stubs: + self._stubs[ + "aggregate_framework_compliance_report" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/AggregateFrameworkComplianceReport", + request_serializer=monitoring.AggregateFrameworkComplianceReportRequest.serialize, + response_deserializer=monitoring.AggregateFrameworkComplianceReportResponse.deserialize, + ) + return self._stubs["aggregate_framework_compliance_report"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("MonitoringGrpcTransport",) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/grpc_asyncio.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a998d221f40a --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/grpc_asyncio.py @@ -0,0 +1,720 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.cloudsecuritycompliance_v1.types import monitoring + +from .base import DEFAULT_CLIENT_INFO, MonitoringTransport +from .grpc import MonitoringGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class MonitoringGrpcAsyncIOTransport(MonitoringTransport): + """gRPC AsyncIO backend transport for Monitoring. 
+ + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "cloudsecuritycompliance.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "cloudsecuritycompliance.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudsecuritycompliance.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. 
+ This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_framework_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListFrameworkComplianceSummariesRequest], + Awaitable[monitoring.ListFrameworkComplianceSummariesResponse], + ]: + r"""Return a callable for the list framework compliance + summaries method over gRPC. + + Lists the framework compliance summary for a given + scope. + + Returns: + Callable[[~.ListFrameworkComplianceSummariesRequest], + Awaitable[~.ListFrameworkComplianceSummariesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_framework_compliance_summaries" not in self._stubs: + self._stubs[ + "list_framework_compliance_summaries" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/ListFrameworkComplianceSummaries", + request_serializer=monitoring.ListFrameworkComplianceSummariesRequest.serialize, + response_deserializer=monitoring.ListFrameworkComplianceSummariesResponse.deserialize, + ) + return self._stubs["list_framework_compliance_summaries"] + + @property + def list_finding_summaries( + self, + ) -> Callable[ + [monitoring.ListFindingSummariesRequest], + Awaitable[monitoring.ListFindingSummariesResponse], + ]: + r"""Return a callable for the list finding summaries method over gRPC. + + Lists the finding summary by category for a given + scope. + + Returns: + Callable[[~.ListFindingSummariesRequest], + Awaitable[~.ListFindingSummariesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_finding_summaries" not in self._stubs: + self._stubs["list_finding_summaries"] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/ListFindingSummaries", + request_serializer=monitoring.ListFindingSummariesRequest.serialize, + response_deserializer=monitoring.ListFindingSummariesResponse.deserialize, + ) + return self._stubs["list_finding_summaries"] + + @property + def fetch_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.FetchFrameworkComplianceReportRequest], + Awaitable[monitoring.FrameworkComplianceReport], + ]: + r"""Return a callable for the fetch framework compliance + report method over gRPC. + + Fetches the framework compliance report for a given + scope. + + Returns: + Callable[[~.FetchFrameworkComplianceReportRequest], + Awaitable[~.FrameworkComplianceReport]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_framework_compliance_report" not in self._stubs: + self._stubs[ + "fetch_framework_compliance_report" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/FetchFrameworkComplianceReport", + request_serializer=monitoring.FetchFrameworkComplianceReportRequest.serialize, + response_deserializer=monitoring.FrameworkComplianceReport.deserialize, + ) + return self._stubs["fetch_framework_compliance_report"] + + @property + def list_control_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListControlComplianceSummariesRequest], + Awaitable[monitoring.ListControlComplianceSummariesResponse], + ]: + r"""Return a callable for the list control compliance + summaries method over gRPC. + + Lists the control compliance summary for a given + scope. + + Returns: + Callable[[~.ListControlComplianceSummariesRequest], + Awaitable[~.ListControlComplianceSummariesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_control_compliance_summaries" not in self._stubs: + self._stubs[ + "list_control_compliance_summaries" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/ListControlComplianceSummaries", + request_serializer=monitoring.ListControlComplianceSummariesRequest.serialize, + response_deserializer=monitoring.ListControlComplianceSummariesResponse.deserialize, + ) + return self._stubs["list_control_compliance_summaries"] + + @property + def aggregate_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.AggregateFrameworkComplianceReportRequest], + Awaitable[monitoring.AggregateFrameworkComplianceReportResponse], + ]: + r"""Return a callable for the aggregate framework compliance + report method over gRPC. + + Gets the aggregated compliance report over time for a + given scope. + + Returns: + Callable[[~.AggregateFrameworkComplianceReportRequest], + Awaitable[~.AggregateFrameworkComplianceReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "aggregate_framework_compliance_report" not in self._stubs: + self._stubs[ + "aggregate_framework_compliance_report" + ] = self._logged_channel.unary_unary( + "/google.cloud.cloudsecuritycompliance.v1.Monitoring/AggregateFrameworkComplianceReport", + request_serializer=monitoring.AggregateFrameworkComplianceReportRequest.serialize, + response_deserializer=monitoring.AggregateFrameworkComplianceReportResponse.deserialize, + ) + return self._stubs["aggregate_framework_compliance_report"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_framework_compliance_summaries: self._wrap_method( + self.list_framework_compliance_summaries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_finding_summaries: self._wrap_method( + self.list_finding_summaries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_framework_compliance_report: self._wrap_method( + self.fetch_framework_compliance_report, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_control_compliance_summaries: self._wrap_method( + self.list_control_compliance_summaries, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.aggregate_framework_compliance_report: self._wrap_method( + self.aggregate_framework_compliance_report, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("MonitoringGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest.py new file mode 100644 index 000000000000..bc744a7c3e0b --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest.py @@ -0,0 +1,2275 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.cloudsecuritycompliance_v1.types import monitoring + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseMonitoringRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class MonitoringRestInterceptor: + """Interceptor for Monitoring. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MonitoringRestTransport. + + .. 
code-block:: python + class MyCustomMonitoringInterceptor(MonitoringRestInterceptor): + def pre_aggregate_framework_compliance_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregate_framework_compliance_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_framework_compliance_report(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_framework_compliance_report(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_control_compliance_summaries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_control_compliance_summaries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_finding_summaries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_finding_summaries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_framework_compliance_summaries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_framework_compliance_summaries(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MonitoringRestTransport(interceptor=MyCustomMonitoringInterceptor()) + client = MonitoringClient(transport=transport) + + + """ + + def pre_aggregate_framework_compliance_report( + self, + request: monitoring.AggregateFrameworkComplianceReportRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.AggregateFrameworkComplianceReportRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for aggregate_framework_compliance_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_aggregate_framework_compliance_report( + self, response: monitoring.AggregateFrameworkComplianceReportResponse + ) -> monitoring.AggregateFrameworkComplianceReportResponse: + """Post-rpc interceptor for aggregate_framework_compliance_report + + DEPRECATED. Please use the `post_aggregate_framework_compliance_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. This `post_aggregate_framework_compliance_report` interceptor runs + before the `post_aggregate_framework_compliance_report_with_metadata` interceptor. + """ + return response + + def post_aggregate_framework_compliance_report_with_metadata( + self, + response: monitoring.AggregateFrameworkComplianceReportResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.AggregateFrameworkComplianceReportResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for aggregate_framework_compliance_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Monitoring server but before it is returned to user code. + + We recommend only using this `post_aggregate_framework_compliance_report_with_metadata` + interceptor in new development instead of the `post_aggregate_framework_compliance_report` interceptor. 
+ When both interceptors are used, this `post_aggregate_framework_compliance_report_with_metadata` interceptor runs after the + `post_aggregate_framework_compliance_report` interceptor. The (possibly modified) response returned by + `post_aggregate_framework_compliance_report` will be passed to + `post_aggregate_framework_compliance_report_with_metadata`. + """ + return response, metadata + + def pre_fetch_framework_compliance_report( + self, + request: monitoring.FetchFrameworkComplianceReportRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.FetchFrameworkComplianceReportRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for fetch_framework_compliance_report + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_fetch_framework_compliance_report( + self, response: monitoring.FrameworkComplianceReport + ) -> monitoring.FrameworkComplianceReport: + """Post-rpc interceptor for fetch_framework_compliance_report + + DEPRECATED. Please use the `post_fetch_framework_compliance_report_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. This `post_fetch_framework_compliance_report` interceptor runs + before the `post_fetch_framework_compliance_report_with_metadata` interceptor. + """ + return response + + def post_fetch_framework_compliance_report_with_metadata( + self, + response: monitoring.FrameworkComplianceReport, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.FrameworkComplianceReport, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for fetch_framework_compliance_report + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Monitoring server but before it is returned to user code. + + We recommend only using this `post_fetch_framework_compliance_report_with_metadata` + interceptor in new development instead of the `post_fetch_framework_compliance_report` interceptor. + When both interceptors are used, this `post_fetch_framework_compliance_report_with_metadata` interceptor runs after the + `post_fetch_framework_compliance_report` interceptor. The (possibly modified) response returned by + `post_fetch_framework_compliance_report` will be passed to + `post_fetch_framework_compliance_report_with_metadata`. + """ + return response, metadata + + def pre_list_control_compliance_summaries( + self, + request: monitoring.ListControlComplianceSummariesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.ListControlComplianceSummariesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_control_compliance_summaries + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_list_control_compliance_summaries( + self, response: monitoring.ListControlComplianceSummariesResponse + ) -> monitoring.ListControlComplianceSummariesResponse: + """Post-rpc interceptor for list_control_compliance_summaries + + DEPRECATED. Please use the `post_list_control_compliance_summaries_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. This `post_list_control_compliance_summaries` interceptor runs + before the `post_list_control_compliance_summaries_with_metadata` interceptor. + """ + return response + + def post_list_control_compliance_summaries_with_metadata( + self, + response: monitoring.ListControlComplianceSummariesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.ListControlComplianceSummariesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_control_compliance_summaries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Monitoring server but before it is returned to user code. + + We recommend only using this `post_list_control_compliance_summaries_with_metadata` + interceptor in new development instead of the `post_list_control_compliance_summaries` interceptor. + When both interceptors are used, this `post_list_control_compliance_summaries_with_metadata` interceptor runs after the + `post_list_control_compliance_summaries` interceptor. The (possibly modified) response returned by + `post_list_control_compliance_summaries` will be passed to + `post_list_control_compliance_summaries_with_metadata`. + """ + return response, metadata + + def pre_list_finding_summaries( + self, + request: monitoring.ListFindingSummariesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.ListFindingSummariesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_finding_summaries + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_list_finding_summaries( + self, response: monitoring.ListFindingSummariesResponse + ) -> monitoring.ListFindingSummariesResponse: + """Post-rpc interceptor for list_finding_summaries + + DEPRECATED. Please use the `post_list_finding_summaries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. This `post_list_finding_summaries` interceptor runs + before the `post_list_finding_summaries_with_metadata` interceptor. + """ + return response + + def post_list_finding_summaries_with_metadata( + self, + response: monitoring.ListFindingSummariesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.ListFindingSummariesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_finding_summaries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Monitoring server but before it is returned to user code. + + We recommend only using this `post_list_finding_summaries_with_metadata` + interceptor in new development instead of the `post_list_finding_summaries` interceptor. + When both interceptors are used, this `post_list_finding_summaries_with_metadata` interceptor runs after the + `post_list_finding_summaries` interceptor. The (possibly modified) response returned by + `post_list_finding_summaries` will be passed to + `post_list_finding_summaries_with_metadata`. 
+ """ + return response, metadata + + def pre_list_framework_compliance_summaries( + self, + request: monitoring.ListFrameworkComplianceSummariesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.ListFrameworkComplianceSummariesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_framework_compliance_summaries + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_list_framework_compliance_summaries( + self, response: monitoring.ListFrameworkComplianceSummariesResponse + ) -> monitoring.ListFrameworkComplianceSummariesResponse: + """Post-rpc interceptor for list_framework_compliance_summaries + + DEPRECATED. Please use the `post_list_framework_compliance_summaries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. This `post_list_framework_compliance_summaries` interceptor runs + before the `post_list_framework_compliance_summaries_with_metadata` interceptor. + """ + return response + + def post_list_framework_compliance_summaries_with_metadata( + self, + response: monitoring.ListFrameworkComplianceSummariesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + monitoring.ListFrameworkComplianceSummariesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_framework_compliance_summaries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Monitoring server but before it is returned to user code. + + We recommend only using this `post_list_framework_compliance_summaries_with_metadata` + interceptor in new development instead of the `post_list_framework_compliance_summaries` interceptor. + When both interceptors are used, this `post_list_framework_compliance_summaries_with_metadata` interceptor runs after the + `post_list_framework_compliance_summaries` interceptor. The (possibly modified) response returned by + `post_list_framework_compliance_summaries` will be passed to + `post_list_framework_compliance_summaries_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. 
+ """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Monitoring server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Monitoring server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class MonitoringRestStub: + _session: AuthorizedSession + _host: str + _interceptor: MonitoringRestInterceptor + + +class MonitoringRestTransport(_BaseMonitoringRestTransport): + """REST backend synchronous transport for Monitoring. + + Service describing handlers for resources + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "cloudsecuritycompliance.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[MonitoringRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudsecuritycompliance.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or MonitoringRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AggregateFrameworkComplianceReport( + _BaseMonitoringRestTransport._BaseAggregateFrameworkComplianceReport, + MonitoringRestStub, + ): + def __hash__(self): + return hash("MonitoringRestTransport.AggregateFrameworkComplianceReport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: monitoring.AggregateFrameworkComplianceReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.AggregateFrameworkComplianceReportResponse: + r"""Call the aggregate framework + compliance report method over HTTP. + + Args: + request (~.monitoring.AggregateFrameworkComplianceReportRequest): + The request object. The request message for + [AggregateFrameworkComplianceReport][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.monitoring.AggregateFrameworkComplianceReportResponse: + The response message for + [AggregateFrameworkComplianceReport][]. 
+ + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseAggregateFrameworkComplianceReport._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_aggregate_framework_compliance_report( + request, metadata + ) + transcoded_request = _BaseMonitoringRestTransport._BaseAggregateFrameworkComplianceReport._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMonitoringRestTransport._BaseAggregateFrameworkComplianceReport._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.AggregateFrameworkComplianceReport", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "AggregateFrameworkComplianceReport", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._AggregateFrameworkComplianceReport._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = monitoring.AggregateFrameworkComplianceReportResponse() + pb_resp = monitoring.AggregateFrameworkComplianceReportResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_aggregate_framework_compliance_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_aggregate_framework_compliance_report_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + monitoring.AggregateFrameworkComplianceReportResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.aggregate_framework_compliance_report", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "AggregateFrameworkComplianceReport", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _FetchFrameworkComplianceReport( + _BaseMonitoringRestTransport._BaseFetchFrameworkComplianceReport, + MonitoringRestStub, + ): + def __hash__(self): + return hash("MonitoringRestTransport.FetchFrameworkComplianceReport") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = 
getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: monitoring.FetchFrameworkComplianceReportRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.FrameworkComplianceReport: + r"""Call the fetch framework + compliance report method over HTTP. + + Args: + request (~.monitoring.FetchFrameworkComplianceReportRequest): + The request object. The request message for + [FetchFrameworkComplianceReport][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.monitoring.FrameworkComplianceReport: + The response message for + [GetFrameworkComplianceReport][]. + + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseFetchFrameworkComplianceReport._get_http_options() + ) + + request, metadata = self._interceptor.pre_fetch_framework_compliance_report( + request, metadata + ) + transcoded_request = _BaseMonitoringRestTransport._BaseFetchFrameworkComplianceReport._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMonitoringRestTransport._BaseFetchFrameworkComplianceReport._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.FetchFrameworkComplianceReport", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "FetchFrameworkComplianceReport", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MonitoringRestTransport._FetchFrameworkComplianceReport._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
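+ # from_http_response maps the HTTP status code and JSON error body to the matching GoogleAPICallError subclass (for example NotFound for 404), which is then raised to the caller.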
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = monitoring.FrameworkComplianceReport() + pb_resp = monitoring.FrameworkComplianceReport.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_fetch_framework_compliance_report(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_fetch_framework_compliance_report_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = monitoring.FrameworkComplianceReport.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.fetch_framework_compliance_report", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "FetchFrameworkComplianceReport", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListControlComplianceSummaries( + _BaseMonitoringRestTransport._BaseListControlComplianceSummaries, + MonitoringRestStub, + ): + def __hash__(self): + return hash("MonitoringRestTransport.ListControlComplianceSummaries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: monitoring.ListControlComplianceSummariesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.ListControlComplianceSummariesResponse: + r"""Call the list control compliance + summaries method over HTTP. + + Args: + request (~.monitoring.ListControlComplianceSummariesRequest): + The request object. The request message for + [ListControlComplianceSummaries][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.monitoring.ListControlComplianceSummariesResponse: + The response message for + [ListControlComplianceSummaries][]. 
+ + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseListControlComplianceSummaries._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_control_compliance_summaries( + request, metadata + ) + transcoded_request = _BaseMonitoringRestTransport._BaseListControlComplianceSummaries._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMonitoringRestTransport._BaseListControlComplianceSummaries._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.ListControlComplianceSummaries", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListControlComplianceSummaries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MonitoringRestTransport._ListControlComplianceSummaries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = monitoring.ListControlComplianceSummariesResponse() + pb_resp = monitoring.ListControlComplianceSummariesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_control_compliance_summaries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_control_compliance_summaries_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + monitoring.ListControlComplianceSummariesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.list_control_compliance_summaries", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListControlComplianceSummaries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListFindingSummaries( + _BaseMonitoringRestTransport._BaseListFindingSummaries, MonitoringRestStub + ): + def __hash__(self): + return hash("MonitoringRestTransport.ListFindingSummaries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: monitoring.ListFindingSummariesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.ListFindingSummariesResponse: + r"""Call the list finding summaries method over HTTP. + + Args: + request (~.monitoring.ListFindingSummariesRequest): + The request object. The request message for [ListFindingSummaries][]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.monitoring.ListFindingSummariesResponse: + The response message for [ListFindingSummaries][]. + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseListFindingSummaries._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_finding_summaries( + request, metadata + ) + transcoded_request = _BaseMonitoringRestTransport._BaseListFindingSummaries._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMonitoringRestTransport._BaseListFindingSummaries._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.ListFindingSummaries", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListFindingSummaries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._ListFindingSummaries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = monitoring.ListFindingSummariesResponse() + pb_resp = monitoring.ListFindingSummariesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_finding_summaries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_finding_summaries_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = monitoring.ListFindingSummariesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.list_finding_summaries", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListFindingSummaries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListFrameworkComplianceSummaries( + _BaseMonitoringRestTransport._BaseListFrameworkComplianceSummaries, + MonitoringRestStub, + ): + def __hash__(self): + return hash("MonitoringRestTransport.ListFrameworkComplianceSummaries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: monitoring.ListFrameworkComplianceSummariesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> monitoring.ListFrameworkComplianceSummariesResponse: + r"""Call the list framework compliance + summaries method over HTTP. + + Args: + request (~.monitoring.ListFrameworkComplianceSummariesRequest): + The request object. The request message for + [ListFrameworkComplianceSummariesRequest][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesRequest]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.monitoring.ListFrameworkComplianceSummariesResponse: + The response message for + [ListFrameworkComplianceSummariesResponse][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesResponse]. 
+ + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseListFrameworkComplianceSummaries._get_http_options() + ) + + ( + request, + metadata, + ) = self._interceptor.pre_list_framework_compliance_summaries( + request, metadata + ) + transcoded_request = _BaseMonitoringRestTransport._BaseListFrameworkComplianceSummaries._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMonitoringRestTransport._BaseListFrameworkComplianceSummaries._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.ListFrameworkComplianceSummaries", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListFrameworkComplianceSummaries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + MonitoringRestTransport._ListFrameworkComplianceSummaries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = monitoring.ListFrameworkComplianceSummariesResponse() + pb_resp = monitoring.ListFrameworkComplianceSummariesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_framework_compliance_summaries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_framework_compliance_summaries_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + monitoring.ListFrameworkComplianceSummariesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.list_framework_compliance_summaries", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListFrameworkComplianceSummaries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def aggregate_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.AggregateFrameworkComplianceReportRequest], + monitoring.AggregateFrameworkComplianceReportResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregateFrameworkComplianceReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_framework_compliance_report( + self, + ) -> Callable[ + [monitoring.FetchFrameworkComplianceReportRequest], + monitoring.FrameworkComplianceReport, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchFrameworkComplianceReport(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_control_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListControlComplianceSummariesRequest], + monitoring.ListControlComplianceSummariesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListControlComplianceSummaries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_finding_summaries( + self, + ) -> Callable[ + [monitoring.ListFindingSummariesRequest], + monitoring.ListFindingSummariesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFindingSummaries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_framework_compliance_summaries( + self, + ) -> Callable[ + [monitoring.ListFrameworkComplianceSummariesRequest], + monitoring.ListFrameworkComplianceSummariesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFrameworkComplianceSummaries(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseMonitoringRestTransport._BaseGetLocation, MonitoringRestStub + ): + def __hash__(self): + return hash("MonitoringRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = ( + _BaseMonitoringRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMonitoringRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.GetLocation", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseMonitoringRestTransport._BaseListLocations, MonitoringRestStub + ): + def __hash__(self): + return hash("MonitoringRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = ( + _BaseMonitoringRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMonitoringRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.ListLocations", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseMonitoringRestTransport._BaseCancelOperation, MonitoringRestStub + ): + def __hash__(self): + return hash("MonitoringRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseMonitoringRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseMonitoringRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseMonitoringRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseMonitoringRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.CancelOperation", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseMonitoringRestTransport._BaseDeleteOperation, MonitoringRestStub + ): + def __hash__(self): + return hash("MonitoringRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseMonitoringRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseMonitoringRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseMonitoringRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseMonitoringRestTransport._BaseGetOperation, MonitoringRestStub + ): + def __hash__(self): + return hash("MonitoringRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseMonitoringRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = ( + _BaseMonitoringRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseMonitoringRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.GetOperation", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseMonitoringRestTransport._BaseListOperations, MonitoringRestStub + ): + def __hash__(self): + return hash("MonitoringRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over 
HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseMonitoringRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseMonitoringRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseMonitoringRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.cloudsecuritycompliance_v1.MonitoringClient.ListOperations", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = MonitoringRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("MonitoringRestTransport",) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest_base.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest_base.py new file mode 100644 index 000000000000..e20fb3e2cb9b --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest_base.py @@ -0,0 +1,526 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.cloudsecuritycompliance_v1.types import monitoring + +from .base import DEFAULT_CLIENT_INFO, MonitoringTransport + + +class _BaseMonitoringRestTransport(MonitoringTransport): + """Base REST backend transport for Monitoring. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "cloudsecuritycompliance.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudsecuritycompliance.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseAggregateFrameworkComplianceReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/frameworkComplianceReports/*}:aggregate", + }, + { + "method": "get", + "uri": "/v1/{name=folders/*/locations/*/frameworkComplianceReports/*}:aggregate", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/frameworkComplianceReports/*}:aggregate", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = monitoring.AggregateFrameworkComplianceReportRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMonitoringRestTransport._BaseAggregateFrameworkComplianceReport._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseFetchFrameworkComplianceReport: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/frameworkComplianceReports/*}:fetch", + }, + { + "method": "get", + "uri": 
"/v1/{name=folders/*/locations/*/frameworkComplianceReports/*}:fetch", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/frameworkComplianceReports/*}:fetch", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = monitoring.FetchFrameworkComplianceReportRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMonitoringRestTransport._BaseFetchFrameworkComplianceReport._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListControlComplianceSummaries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*/frameworkComplianceReports/*}/controlComplianceSummaries", + }, + { + "method": "get", + "uri": "/v1/{parent=folders/*/locations/*/frameworkComplianceReports/*}/controlComplianceSummaries", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/frameworkComplianceReports/*}/controlComplianceSummaries", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = monitoring.ListControlComplianceSummariesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMonitoringRestTransport._BaseListControlComplianceSummaries._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListFindingSummaries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/findingSummaries", + }, + { + "method": "get", + "uri": "/v1/{parent=folders/*/locations/*}/findingSummaries", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/findingSummaries", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = monitoring.ListFindingSummariesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMonitoringRestTransport._BaseListFindingSummaries._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListFrameworkComplianceSummaries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/frameworkComplianceSummaries", + }, + { + "method": "get", + "uri": "/v1/{parent=folders/*/locations/*}/frameworkComplianceSummaries", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/frameworkComplianceSummaries", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = monitoring.ListFrameworkComplianceSummariesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseMonitoringRestTransport._BaseListFrameworkComplianceSummaries._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": 
"/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseMonitoringRestTransport",) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py index d7bedb6888fa..52b649a192dc 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py @@ -101,6 +101,30 @@ TargetResourceConfig, TargetResourceCreationConfig, ) +from .monitoring import ( + AggregatedComplianceReport, + 
AggregateFrameworkComplianceReportRequest, + AggregateFrameworkComplianceReportResponse, + CloudControlAssessmentDetails, + CloudControlReport, + ControlAssessmentDetails, + ControlComplianceSummary, + EvaluationState, + FetchFrameworkComplianceReportRequest, + FindingClass, + FindingSummary, + FrameworkComplianceReport, + FrameworkComplianceSummary, + ListControlComplianceSummariesRequest, + ListControlComplianceSummariesResponse, + ListFindingSummariesRequest, + ListFindingSummariesResponse, + ListFrameworkComplianceSummariesRequest, + ListFrameworkComplianceSummariesResponse, + ManualCloudControlAssessmentDetails, + SimilarControls, + TargetResourceDetails, +) __all__ = ( "BucketDestination", @@ -181,4 +205,26 @@ "TargetResourceConfig", "TargetResourceCreationConfig", "DeploymentState", + "AggregatedComplianceReport", + "AggregateFrameworkComplianceReportRequest", + "AggregateFrameworkComplianceReportResponse", + "CloudControlAssessmentDetails", + "CloudControlReport", + "ControlAssessmentDetails", + "ControlComplianceSummary", + "FetchFrameworkComplianceReportRequest", + "FindingSummary", + "FrameworkComplianceReport", + "FrameworkComplianceSummary", + "ListControlComplianceSummariesRequest", + "ListControlComplianceSummariesResponse", + "ListFindingSummariesRequest", + "ListFindingSummariesResponse", + "ListFrameworkComplianceSummariesRequest", + "ListFrameworkComplianceSummariesResponse", + "ManualCloudControlAssessmentDetails", + "SimilarControls", + "TargetResourceDetails", + "EvaluationState", + "FindingClass", ) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/monitoring.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/monitoring.py new file mode 100644 index 000000000000..d5d4d4750a9c --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/monitoring.py @@ -0,0 +1,1034 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import interval_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.cloudsecuritycompliance_v1.types import common + +__protobuf__ = proto.module( + package="google.cloud.cloudsecuritycompliance.v1", + manifest={ + "EvaluationState", + "FindingClass", + "ListFrameworkComplianceSummariesRequest", + "ListFrameworkComplianceSummariesResponse", + "FrameworkComplianceReport", + "FetchFrameworkComplianceReportRequest", + "ListFindingSummariesRequest", + "ListFindingSummariesResponse", + "ListControlComplianceSummariesRequest", + "ListControlComplianceSummariesResponse", + "AggregateFrameworkComplianceReportRequest", + "AggregateFrameworkComplianceReportResponse", + "ControlAssessmentDetails", + "FrameworkComplianceSummary", + "FindingSummary", + "ControlComplianceSummary", + "CloudControlReport", + "ManualCloudControlAssessmentDetails", + "CloudControlAssessmentDetails", + "SimilarControls", + "AggregatedComplianceReport", + "TargetResourceDetails", + }, +) + + +class EvaluationState(proto.Enum): + r"""The evaluation state of the control. + + Values: + EVALUATION_STATE_UNSPECIFIED (0): + Default value. This value is unused. + EVALUATION_STATE_PASSED (1): + The control is passing. + EVALUATION_STATE_FAILED (2): + The control is failing. + EVALUATION_STATE_NOT_ASSESSED (3): + The control is not assessed. + """ + EVALUATION_STATE_UNSPECIFIED = 0 + EVALUATION_STATE_PASSED = 1 + EVALUATION_STATE_FAILED = 2 + EVALUATION_STATE_NOT_ASSESSED = 3 + + +class FindingClass(proto.Enum): + r"""A finding is a record of assessment data like security, risk, + health, or privacy. + + Values: + FINDING_CLASS_UNSPECIFIED (0): + Default value. This value is unused. + THREAT (1): + The activity is unwanted or malicious. + VULNERABILITY (2): + A potential weakness in software that + increases risk to confidentiality, integrity, + and availability. + MISCONFIGURATION (3): + A potential weakness in a cloud resource or + asset configuration that increases risk. + OBSERVATION (4): + A security observation that is for + informational purposes. + SCC_ERROR (5): + An error that prevents Security Command + Center from functioning properly. + POSTURE_VIOLATION (6): + A potential security risk that's due to a + change in the security posture. + TOXIC_COMBINATION (7): + A combination of security issues that + represent a more severe security problem when + taken together. + SENSITIVE_DATA_RISK (8): + A potential security risk to data assets that + contain sensitive data. + CHOKEPOINT (9): + A resource or resource group where high risk + attack paths converge, based on attack path + simulations (APS). + """ + FINDING_CLASS_UNSPECIFIED = 0 + THREAT = 1 + VULNERABILITY = 2 + MISCONFIGURATION = 3 + OBSERVATION = 4 + SCC_ERROR = 5 + POSTURE_VIOLATION = 6 + TOXIC_COMBINATION = 7 + SENSITIVE_DATA_RISK = 8 + CHOKEPOINT = 9 + + +class ListFrameworkComplianceSummariesRequest(proto.Message): + r"""The request message for + [ListFrameworkComplianceSummariesRequest][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesRequest]. + + Attributes: + parent (str): + Required. The parent scope for the framework + compliance summary. + page_size (int): + Optional. The requested page size. The server + might return fewer items than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. 
A token that identifies the page of + results that the server should return. + filter (str): + Optional. The filtering results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListFrameworkComplianceSummariesResponse(proto.Message): + r"""The response message for + [ListFrameworkComplianceSummariesResponse][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesResponse]. + + Attributes: + framework_compliance_summaries (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.FrameworkComplianceSummary]): + The list of framework compliance summaries. + next_page_token (str): + Output only. The token to retrieve the next + page of results. + """ + + @property + def raw_page(self): + return self + + framework_compliance_summaries: MutableSequence[ + "FrameworkComplianceSummary" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FrameworkComplianceSummary", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class FrameworkComplianceReport(proto.Message): + r"""The response message for [GetFrameworkComplianceReport][]. + + Attributes: + framework (str): + The name of the framework. + framework_description (str): + The description of the framework. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last updated time of the + report. + control_assessment_details (google.cloud.cloudsecuritycompliance_v1.types.ControlAssessmentDetails): + The control assessment details of the + framework. + framework_type (google.cloud.cloudsecuritycompliance_v1.types.Framework.FrameworkType): + The type of framework. + supported_cloud_providers (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.CloudProvider]): + The list of cloud providers supported by the + framework. + framework_categories (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.FrameworkCategory]): + The list of framework categories supported. + framework_display_name (str): + Optional. The display name for the framework. + name (str): + Identifier. The name of the framework + compliance report. + major_revision_id (int): + The latest major revision ID of the + framework. + minor_revision_id (int): + The latest minor revision ID of the latest + major revision of the framework. + target_resource_details (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.TargetResourceDetails]): + The target resource details of the framework. 
+ """ + + framework: str = proto.Field( + proto.STRING, + number=1, + ) + framework_description: str = proto.Field( + proto.STRING, + number=2, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + control_assessment_details: "ControlAssessmentDetails" = proto.Field( + proto.MESSAGE, + number=4, + message="ControlAssessmentDetails", + ) + framework_type: common.Framework.FrameworkType = proto.Field( + proto.ENUM, + number=5, + enum=common.Framework.FrameworkType, + ) + supported_cloud_providers: MutableSequence[ + common.CloudProvider + ] = proto.RepeatedField( + proto.ENUM, + number=6, + enum=common.CloudProvider, + ) + framework_categories: MutableSequence[ + common.FrameworkCategory + ] = proto.RepeatedField( + proto.ENUM, + number=7, + enum=common.FrameworkCategory, + ) + framework_display_name: str = proto.Field( + proto.STRING, + number=8, + ) + name: str = proto.Field( + proto.STRING, + number=9, + ) + major_revision_id: int = proto.Field( + proto.INT64, + number=10, + ) + minor_revision_id: int = proto.Field( + proto.INT64, + number=11, + ) + target_resource_details: MutableSequence[ + "TargetResourceDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message="TargetResourceDetails", + ) + + +class FetchFrameworkComplianceReportRequest(proto.Message): + r"""The request message for [FetchFrameworkComplianceReport][]. + + Attributes: + name (str): + Required. The name of the framework + compliance report to retrieve. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The end time of the report. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class ListFindingSummariesRequest(proto.Message): + r"""The request message for [ListFindingSummaries][]. + + Attributes: + parent (str): + Required. The parent scope for the framework + overview page. + page_size (int): + Optional. The requested page size. The server + might return fewer items than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. A token that identifies the page of + results that the server should return. + filter (str): + Optional. The filtering results. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The end time of the finding + summary. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class ListFindingSummariesResponse(proto.Message): + r"""The response message for [ListFindingSummaries][]. + + Attributes: + finding_summaries (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.FindingSummary]): + List of finding summary by category. + next_page_token (str): + Output only. The token to retrieve the next + page of results. 
+ """ + + @property + def raw_page(self): + return self + + finding_summaries: MutableSequence["FindingSummary"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="FindingSummary", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListControlComplianceSummariesRequest(proto.Message): + r"""The request message for [ListControlComplianceSummaries][]. + + Attributes: + parent (str): + Required. The parent scope for the framework + overview page. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The end time of the control + compliance summary. + page_size (int): + Optional. The requested page size. The server + might return fewer items than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. A token that identifies the page of + results that the server should return. + filter (str): + Optional. The filtering results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListControlComplianceSummariesResponse(proto.Message): + r"""The response message for [ListControlComplianceSummaries][]. + + Attributes: + control_compliance_summaries (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.ControlComplianceSummary]): + The list of control compliance details. + next_page_token (str): + Output only. The token to retrieve the next + page of results. + """ + + @property + def raw_page(self): + return self + + control_compliance_summaries: MutableSequence[ + "ControlComplianceSummary" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ControlComplianceSummary", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AggregateFrameworkComplianceReportRequest(proto.Message): + r"""The request message for [AggregateFrameworkComplianceReport][]. + + Attributes: + name (str): + Required. The name of the aggregated compliance report over + time to retrieve. + + The supported format is: + ``organizations/{organization_id}/locations/{location}/frameworkComplianceReports/{framework_compliance_report}`` + interval (google.type.interval_pb2.Interval): + Optional. The start and end time range for + the aggregated compliance report. + filter (str): + Optional. The filtering results. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + interval: interval_pb2.Interval = proto.Field( + proto.MESSAGE, + number=2, + message=interval_pb2.Interval, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AggregateFrameworkComplianceReportResponse(proto.Message): + r"""The response message for [AggregateFrameworkComplianceReport][]. + + Attributes: + aggregated_compliance_reports (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.AggregatedComplianceReport]): + The list of aggregated compliance reports. + """ + + aggregated_compliance_reports: MutableSequence[ + "AggregatedComplianceReport" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AggregatedComplianceReport", + ) + + +class ControlAssessmentDetails(proto.Message): + r"""The details for a control assessment. 
+ + Attributes: + passing_controls (int): + The number of controls that are passing or + not assessed. + failing_controls (int): + The number of controls that are failing. + assessed_passing_controls (int): + The number of controls that were assessed and + are passing. + not_assessed_controls (int): + The number of controls that aren't assessed + because they require manual review. + """ + + passing_controls: int = proto.Field( + proto.INT32, + number=1, + ) + failing_controls: int = proto.Field( + proto.INT32, + number=2, + ) + assessed_passing_controls: int = proto.Field( + proto.INT32, + number=3, + ) + not_assessed_controls: int = proto.Field( + proto.INT32, + number=4, + ) + + +class FrameworkComplianceSummary(proto.Message): + r"""The details for a framework compliance summary. + + Attributes: + framework (str): + The name of the framework. + control_assessment_details (google.cloud.cloudsecuritycompliance_v1.types.ControlAssessmentDetails): + The control assessment details of the + framework. + framework_type (google.cloud.cloudsecuritycompliance_v1.types.Framework.FrameworkType): + The type of framework. + supported_cloud_providers (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.CloudProvider]): + The list of cloud providers supported by the + framework. + framework_categories (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.FrameworkCategory]): + The list of framework categories supported by + the framework. + framework_display_name (str): + Optional. The display name for the framework. + name (str): + Identifier. The name of the framework + compliance summary. + major_revision_id (int): + The major revision ID of the framework. + minor_revision_id (int): + The minor revision ID of the framework. + target_resource_details (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.TargetResourceDetails]): + The target resource details for the + framework. + """ + + framework: str = proto.Field( + proto.STRING, + number=1, + ) + control_assessment_details: "ControlAssessmentDetails" = proto.Field( + proto.MESSAGE, + number=2, + message="ControlAssessmentDetails", + ) + framework_type: common.Framework.FrameworkType = proto.Field( + proto.ENUM, + number=3, + enum=common.Framework.FrameworkType, + ) + supported_cloud_providers: MutableSequence[ + common.CloudProvider + ] = proto.RepeatedField( + proto.ENUM, + number=4, + enum=common.CloudProvider, + ) + framework_categories: MutableSequence[ + common.FrameworkCategory + ] = proto.RepeatedField( + proto.ENUM, + number=5, + enum=common.FrameworkCategory, + ) + framework_display_name: str = proto.Field( + proto.STRING, + number=6, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + major_revision_id: int = proto.Field( + proto.INT64, + number=8, + ) + minor_revision_id: int = proto.Field( + proto.INT64, + number=9, + ) + target_resource_details: MutableSequence[ + "TargetResourceDetails" + ] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="TargetResourceDetails", + ) + + +class FindingSummary(proto.Message): + r"""The details for a finding. + + Attributes: + finding_category (str): + The category of the finding. + finding_class (google.cloud.cloudsecuritycompliance_v1.types.FindingClass): + The class of the finding. + severity (google.cloud.cloudsecuritycompliance_v1.types.Severity): + The severity of the finding. + finding_count (int): + The count of the finding. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last updated time of the + finding. 
+ related_frameworks (MutableSequence[str]): + Optional. The list of compliance frameworks + that the finding belongs to. + name (str): + Identifier. The name of the finding summary. + """ + + finding_category: str = proto.Field( + proto.STRING, + number=1, + ) + finding_class: "FindingClass" = proto.Field( + proto.ENUM, + number=2, + enum="FindingClass", + ) + severity: common.Severity = proto.Field( + proto.ENUM, + number=3, + enum=common.Severity, + ) + finding_count: int = proto.Field( + proto.INT64, + number=4, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + related_frameworks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ControlComplianceSummary(proto.Message): + r"""The details for control compliance. + + Attributes: + control (str): + The name of the control. + display_name (str): + The display name of the control. + description (str): + The description of the control. + overall_evaluation_state (google.cloud.cloudsecuritycompliance_v1.types.EvaluationState): + Output only. The overall evaluation status of + the control. + total_findings_count (int): + The total number of findings for the control. + compliance_frameworks (MutableSequence[str]): + The list of compliance frameworks that the + control belongs to. + similar_controls (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.SimilarControls]): + The list of similar controls. + cloud_control_reports (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.CloudControlReport]): + The list of cloud control reports. + control_responsibility_type (google.cloud.cloudsecuritycompliance_v1.types.RegulatoryControlResponsibilityType): + The responsibility type for the control. + is_fake_control (bool): + Whether the control is a fake control. Fake + controls are created and mapped to cloud + controls that don't belong to a control group. + name (str): + Identifier. The name of the control + compliance summary. + """ + + control: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + overall_evaluation_state: "EvaluationState" = proto.Field( + proto.ENUM, + number=4, + enum="EvaluationState", + ) + total_findings_count: int = proto.Field( + proto.INT32, + number=5, + ) + compliance_frameworks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + similar_controls: MutableSequence["SimilarControls"] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="SimilarControls", + ) + cloud_control_reports: MutableSequence["CloudControlReport"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="CloudControlReport", + ) + control_responsibility_type: common.RegulatoryControlResponsibilityType = ( + proto.Field( + proto.ENUM, + number=9, + enum=common.RegulatoryControlResponsibilityType, + ) + ) + is_fake_control: bool = proto.Field( + proto.BOOL, + number=10, + ) + name: str = proto.Field( + proto.STRING, + number=11, + ) + + +class CloudControlReport(proto.Message): + r"""The cloud control report. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + manual_cloud_control_assessment_details (google.cloud.cloudsecuritycompliance_v1.types.ManualCloudControlAssessmentDetails): + The details of a manual cloud control + assessment. + + This field is a member of `oneof`_ ``assessment_details``. + cloud_control_assessment_details (google.cloud.cloudsecuritycompliance_v1.types.CloudControlAssessmentDetails): + The details of a cloud control assessment. + + This field is a member of `oneof`_ ``assessment_details``. + cloud_control (str): + The name of the cloud control. + display_name (str): + The display name of the cloud control. + description (str): + The description of the cloud control. + categories (MutableSequence[str]): + The list of categories for the cloud control. + similar_controls (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.SimilarControls]): + The list of similar controls. + cloud_control_type (google.cloud.cloudsecuritycompliance_v1.types.CloudControl.Type): + The type of the cloud control. + finding_category (str): + The category of the finding. + rules (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.Rule]): + The list of rules that correspond to the + cloud control. + finding_severity (google.cloud.cloudsecuritycompliance_v1.types.Severity): + The severity of the finding. + enforcement_mode (google.cloud.cloudsecuritycompliance_v1.types.EnforcementMode): + The enforcement mode of the cloud control. + cloud_control_deployment (str): + The name of the cloud control deployment. + major_revision_id (int): + The major revision ID of the cloud control. + minor_revision_id (int): + The minor revision ID of the cloud control. + framework_major_revision_ids (MutableSequence[int]): + The major revision IDs of the frameworks that + the cloud control belongs to. 
+ """ + + manual_cloud_control_assessment_details: "ManualCloudControlAssessmentDetails" = ( + proto.Field( + proto.MESSAGE, + number=13, + oneof="assessment_details", + message="ManualCloudControlAssessmentDetails", + ) + ) + cloud_control_assessment_details: "CloudControlAssessmentDetails" = proto.Field( + proto.MESSAGE, + number=14, + oneof="assessment_details", + message="CloudControlAssessmentDetails", + ) + cloud_control: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + categories: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + similar_controls: MutableSequence["SimilarControls"] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message="SimilarControls", + ) + cloud_control_type: common.CloudControl.Type = proto.Field( + proto.ENUM, + number=10, + enum=common.CloudControl.Type, + ) + finding_category: str = proto.Field( + proto.STRING, + number=11, + ) + rules: MutableSequence[common.Rule] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message=common.Rule, + ) + finding_severity: common.Severity = proto.Field( + proto.ENUM, + number=15, + enum=common.Severity, + ) + enforcement_mode: common.EnforcementMode = proto.Field( + proto.ENUM, + number=16, + enum=common.EnforcementMode, + ) + cloud_control_deployment: str = proto.Field( + proto.STRING, + number=17, + ) + major_revision_id: int = proto.Field( + proto.INT64, + number=18, + ) + minor_revision_id: int = proto.Field( + proto.INT64, + number=19, + ) + framework_major_revision_ids: MutableSequence[int] = proto.RepeatedField( + proto.INT64, + number=20, + ) + + +class ManualCloudControlAssessmentDetails(proto.Message): + r"""The details for a manual cloud control assessment. + + Attributes: + manual_cloud_control_guide (MutableSequence[str]): + The guide for assessing a cloud control + manually. + """ + + manual_cloud_control_guide: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class CloudControlAssessmentDetails(proto.Message): + r"""The cloud control assessment details for non-manual cloud + controls. + + Attributes: + findings_count (int): + The number of findings for the cloud control. + evaluation_state (google.cloud.cloudsecuritycompliance_v1.types.EvaluationState): + Output only. The evaluation status of the + cloud control. + """ + + findings_count: int = proto.Field( + proto.INT32, + number=1, + ) + evaluation_state: "EvaluationState" = proto.Field( + proto.ENUM, + number=2, + enum="EvaluationState", + ) + + +class SimilarControls(proto.Message): + r"""The similar controls. + + Attributes: + framework (str): + The name of the framework. + control_id (str): + The ID of the control. + """ + + framework: str = proto.Field( + proto.STRING, + number=1, + ) + control_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class AggregatedComplianceReport(proto.Message): + r"""The aggregated compliance report. + + Attributes: + control_assessment_details (google.cloud.cloudsecuritycompliance_v1.types.ControlAssessmentDetails): + The control assessment details of the + framework. + report_time (google.protobuf.timestamp_pb2.Timestamp): + The report time of the aggregated compliance + report. 
+ """ + + control_assessment_details: "ControlAssessmentDetails" = proto.Field( + proto.MESSAGE, + number=1, + message="ControlAssessmentDetails", + ) + report_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class TargetResourceDetails(proto.Message): + r"""The details for a target resource. + + Attributes: + framework_deployment (str): + The framework deployment name for the target resource. + + For example, + ``organizations/{organization_id}/locations/{location}/frameworkDeployments/{framework_deployment_id}`` + target_resource_display_name (str): + The display name of the target resource. For example, + ``google.com``, ``staging-project``, or + ``development-folder``. + target_resource (str): + The target resource. For example, + ``organizations/1234567890``, ``projects/1234567890``, or + ``folders/1234567890``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of the target resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The update time of the target resource. + major_revision_id (int): + The major revision ID of the framework for + the target resource. + minor_revision_id (int): + The minor revision ID of the framework for + the target resource. + """ + + framework_deployment: str = proto.Field( + proto.STRING, + number=1, + ) + target_resource_display_name: str = proto.Field( + proto.STRING, + number=2, + ) + target_resource: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + major_revision_id: int = proto.Field( + proto.INT64, + number=6, + ) + minor_revision_id: int = proto.Field( + proto.INT64, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_async.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_async.py new file mode 100644 index 000000000000..75d3cdcec48c --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregateFrameworkComplianceReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
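The report messages added above are ordinary proto-plus types, so the counts they carry can be read directly off the objects returned by the new Monitoring surface. A minimal usage sketch, assuming default credentials and a hypothetical parent path; only the client, request type, pager iteration, and field names that appear in this diff are relied on:

from google.cloud import cloudsecuritycompliance_v1


def print_framework_compliance(parent: str) -> None:
    # Iterate the framework compliance summaries under `parent` and report
    # the control counts carried by each ControlAssessmentDetails message.
    client = cloudsecuritycompliance_v1.MonitoringClient()
    request = cloudsecuritycompliance_v1.ListFrameworkComplianceSummariesRequest(
        parent=parent,
    )
    for summary in client.list_framework_compliance_summaries(request=request):
        details = summary.control_assessment_details
        print(
            f"{summary.framework_display_name or summary.framework}: "
            f"{details.passing_controls} passing, "
            f"{details.failing_controls} failing, "
            f"{details.not_assessed_controls} not assessed"
        )


# Hypothetical resource path; substitute a real organization/location parent.
# print_framework_compliance("organizations/123/locations/global")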
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_AggregateFrameworkComplianceReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +async def sample_aggregate_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.AggregateFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = await client.aggregate_framework_compliance_report(request=request) + + # Handle the response + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_AggregateFrameworkComplianceReport_async] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_sync.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_sync.py new file mode 100644 index 000000000000..85809da6115e --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregateFrameworkComplianceReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_AggregateFrameworkComplianceReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +def sample_aggregate_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.AggregateFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = client.aggregate_framework_compliance_report(request=request) + + # Handle the response + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_AggregateFrameworkComplianceReport_sync] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_async.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_async.py new file mode 100644 index 000000000000..99c42b9a04ed --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchFrameworkComplianceReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_FetchFrameworkComplianceReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +async def sample_fetch_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.FetchFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = await client.fetch_framework_compliance_report(request=request) + + # Handle the response + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_FetchFrameworkComplianceReport_async] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_sync.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_sync.py new file mode 100644 index 000000000000..87ed6dba9cc5 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchFrameworkComplianceReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_FetchFrameworkComplianceReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +def sample_fetch_framework_compliance_report(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.FetchFrameworkComplianceReportRequest( + name="name_value", + ) + + # Make the request + response = client.fetch_framework_compliance_report(request=request) + + # Handle the response + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_FetchFrameworkComplianceReport_sync] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_async.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_async.py new file mode 100644 index 000000000000..b71d72b18118 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControlComplianceSummaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_ListControlComplianceSummaries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +async def sample_list_control_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListControlComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_control_compliance_summaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_ListControlComplianceSummaries_async] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_sync.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_sync.py new file mode 100644 index 000000000000..04a10778251c --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListControlComplianceSummaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_ListControlComplianceSummaries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +def sample_list_control_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListControlComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_control_compliance_summaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_ListControlComplianceSummaries_sync] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_async.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_async.py new file mode 100644 index 000000000000..4e10aad8c85b --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFindingSummaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_ListFindingSummaries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +async def sample_list_finding_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFindingSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_finding_summaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_ListFindingSummaries_async] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_sync.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_sync.py new file mode 100644 index 000000000000..7171c7f35929 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFindingSummaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_ListFindingSummaries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +def sample_list_finding_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFindingSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_finding_summaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_ListFindingSummaries_sync] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_async.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_async.py new file mode 100644 index 000000000000..961d7d163740 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFrameworkComplianceSummaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_ListFrameworkComplianceSummaries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +async def sample_list_framework_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringAsyncClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFrameworkComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_framework_compliance_summaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_ListFrameworkComplianceSummaries_async] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_sync.py b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_sync.py new file mode 100644 index 000000000000..089f739058ac --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFrameworkComplianceSummaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-cloudsecuritycompliance + + +# [START cloudsecuritycompliance_v1_generated_Monitoring_ListFrameworkComplianceSummaries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import cloudsecuritycompliance_v1 + + +def sample_list_framework_compliance_summaries(): + # Create a client + client = cloudsecuritycompliance_v1.MonitoringClient() + + # Initialize request argument(s) + request = cloudsecuritycompliance_v1.ListFrameworkComplianceSummariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_framework_compliance_summaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END cloudsecuritycompliance_v1_generated_Monitoring_ListFrameworkComplianceSummaries_sync] diff --git a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/snippet_metadata_google.cloud.cloudsecuritycompliance.v1.json b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/snippet_metadata_google.cloud.cloudsecuritycompliance.v1.json index 4a4fcab781bf..523bfe341d28 100644 --- a/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/snippet_metadata_google.cloud.cloudsecuritycompliance.v1.json +++ b/packages/google-cloud-cloudsecuritycompliance/samples/generated_samples/snippet_metadata_google.cloud.cloudsecuritycompliance.v1.json @@ -3644,6 +3644,811 @@ } ], "title": "cloudsecuritycompliance_v1_generated_deployment_list_framework_deployments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient", + "shortName": "MonitoringAsyncClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.aggregate_framework_compliance_report", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.AggregateFrameworkComplianceReport", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "AggregateFrameworkComplianceReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportResponse", + "shortName": "aggregate_framework_compliance_report" + }, + "description": "Sample for AggregateFrameworkComplianceReport", + "file": "cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_AggregateFrameworkComplianceReport_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_async.py" + }, + { + "canonical": 
true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient", + "shortName": "MonitoringClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient.aggregate_framework_compliance_report", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.AggregateFrameworkComplianceReport", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "AggregateFrameworkComplianceReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.types.AggregateFrameworkComplianceReportResponse", + "shortName": "aggregate_framework_compliance_report" + }, + "description": "Sample for AggregateFrameworkComplianceReport", + "file": "cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_AggregateFrameworkComplianceReport_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_aggregate_framework_compliance_report_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient", + "shortName": "MonitoringAsyncClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.fetch_framework_compliance_report", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.FetchFrameworkComplianceReport", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "FetchFrameworkComplianceReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.FetchFrameworkComplianceReportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.types.FrameworkComplianceReport", + "shortName": "fetch_framework_compliance_report" + }, + "description": "Sample for FetchFrameworkComplianceReport", + "file": "cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_FetchFrameworkComplianceReport_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient", + "shortName": "MonitoringClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient.fetch_framework_compliance_report", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.FetchFrameworkComplianceReport", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "FetchFrameworkComplianceReport" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.FetchFrameworkComplianceReportRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.types.FrameworkComplianceReport", + "shortName": "fetch_framework_compliance_report" + }, + "description": "Sample for FetchFrameworkComplianceReport", + "file": "cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_FetchFrameworkComplianceReport_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_fetch_framework_compliance_report_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient", + "shortName": "MonitoringAsyncClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.list_control_compliance_summaries", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.ListControlComplianceSummaries", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "ListControlComplianceSummaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListControlComplianceSummariesAsyncPager", + "shortName": "list_control_compliance_summaries" + }, + "description": "Sample for ListControlComplianceSummaries", + 
"file": "cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_ListControlComplianceSummaries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient", + "shortName": "MonitoringClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient.list_control_compliance_summaries", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.ListControlComplianceSummaries", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "ListControlComplianceSummaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.ListControlComplianceSummariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListControlComplianceSummariesPager", + "shortName": "list_control_compliance_summaries" + }, + "description": "Sample for ListControlComplianceSummaries", + "file": "cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_ListControlComplianceSummaries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_list_control_compliance_summaries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient", + "shortName": "MonitoringAsyncClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.list_finding_summaries", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.ListFindingSummaries", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "ListFindingSummaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFindingSummariesAsyncPager", + "shortName": "list_finding_summaries" + }, + "description": "Sample for ListFindingSummaries", + "file": "cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_ListFindingSummaries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient", + "shortName": "MonitoringClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient.list_finding_summaries", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.ListFindingSummaries", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "ListFindingSummaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.ListFindingSummariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFindingSummariesPager", + "shortName": "list_finding_summaries" + }, + "description": "Sample for ListFindingSummaries", + "file": "cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_ListFindingSummaries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_list_finding_summaries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient", + "shortName": "MonitoringAsyncClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringAsyncClient.list_framework_compliance_summaries", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.ListFrameworkComplianceSummaries", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "ListFrameworkComplianceSummaries" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFrameworkComplianceSummariesAsyncPager", + "shortName": "list_framework_compliance_summaries" + }, + "description": "Sample for ListFrameworkComplianceSummaries", + "file": "cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_ListFrameworkComplianceSummaries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient", + "shortName": "MonitoringClient" + }, + "fullName": "google.cloud.cloudsecuritycompliance_v1.MonitoringClient.list_framework_compliance_summaries", + "method": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring.ListFrameworkComplianceSummaries", + "service": { + "fullName": "google.cloud.cloudsecuritycompliance.v1.Monitoring", + "shortName": "Monitoring" + }, + "shortName": "ListFrameworkComplianceSummaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.cloudsecuritycompliance_v1.types.ListFrameworkComplianceSummariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.cloudsecuritycompliance_v1.services.monitoring.pagers.ListFrameworkComplianceSummariesPager", + "shortName": "list_framework_compliance_summaries" + }, + "description": "Sample for ListFrameworkComplianceSummaries", + "file": "cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudsecuritycompliance_v1_generated_Monitoring_ListFrameworkComplianceSummaries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudsecuritycompliance_v1_generated_monitoring_list_framework_compliance_summaries_sync.py" } ] } diff --git a/packages/google-cloud-cloudsecuritycompliance/scripts/fixup_cloudsecuritycompliance_v1_keywords.py 
b/packages/google-cloud-cloudsecuritycompliance/scripts/fixup_cloudsecuritycompliance_v1_keywords.py index 92224c429cf7..e05ac1677e0f 100644 --- a/packages/google-cloud-cloudsecuritycompliance/scripts/fixup_cloudsecuritycompliance_v1_keywords.py +++ b/packages/google-cloud-cloudsecuritycompliance/scripts/fixup_cloudsecuritycompliance_v1_keywords.py @@ -39,6 +39,7 @@ def partition( class cloudsecuritycomplianceCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'aggregate_framework_compliance_report': ('name', 'interval', 'filter', ), 'calculate_effective_cm_enrollment': ('name', ), 'create_cloud_control': ('parent', 'cloud_control_id', 'cloud_control', ), 'create_framework': ('parent', 'framework_id', 'framework', ), @@ -47,6 +48,7 @@ class cloudsecuritycomplianceCallTransformer(cst.CSTTransformer): 'delete_cloud_control': ('name', ), 'delete_framework': ('name', ), 'delete_framework_deployment': ('name', 'etag', ), + 'fetch_framework_compliance_report': ('name', 'end_time', ), 'generate_framework_audit_scope_report': ('scope', 'report_format', 'compliance_framework', ), 'get_cloud_control': ('name', 'major_revision_id', ), 'get_cloud_control_deployment': ('name', ), @@ -55,7 +57,10 @@ class cloudsecuritycomplianceCallTransformer(cst.CSTTransformer): 'get_framework_deployment': ('name', ), 'list_cloud_control_deployments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_cloud_controls': ('parent', 'page_size', 'page_token', ), + 'list_control_compliance_summaries': ('parent', 'end_time', 'page_size', 'page_token', 'filter', ), + 'list_finding_summaries': ('parent', 'page_size', 'page_token', 'filter', 'end_time', ), 'list_framework_audits': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_framework_compliance_summaries': ('parent', 'page_size', 'page_token', 'filter', ), 'list_framework_deployments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_frameworks': ('parent', 'page_size', 'page_token', ), 'update_cloud_control': ('cloud_control', 'update_mask', ), diff --git a/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_monitoring.py b/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_monitoring.py new file mode 100644 index 000000000000..c11d42c0d618 --- /dev/null +++ b/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_monitoring.py @@ -0,0 +1,7873 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
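The keyword map above spells out the request fields for the new Monitoring RPCs. A minimal sketch of calling the time-bounded methods, assuming the `interval` and `end_time` fields accept the google.type.Interval and google.protobuf.Timestamp well-known types (as the interval_pb2 and timestamp_pb2 imports in the new tests suggest); the resource names passed in are hypothetical placeholders:

import datetime

from google.cloud import cloudsecuritycompliance_v1
from google.protobuf import timestamp_pb2
from google.type import interval_pb2


def aggregate_report_for_window(name: str, days: int = 30):
    # Build a google.type.Interval covering the last `days` days and request
    # an aggregated framework compliance report for that window.
    client = cloudsecuritycompliance_v1.MonitoringClient()
    end = datetime.datetime.now(tz=datetime.timezone.utc)
    start = end - datetime.timedelta(days=days)
    interval = interval_pb2.Interval()
    interval.start_time.FromDatetime(start)
    interval.end_time.FromDatetime(end)
    request = cloudsecuritycompliance_v1.AggregateFrameworkComplianceReportRequest(
        name=name,  # hypothetical aggregated-report resource name
        interval=interval,
    )
    return client.aggregate_framework_compliance_report(request=request)


def fetch_report_as_of_now(name: str):
    # Fetch a single framework compliance report as of the current time.
    client = cloudsecuritycompliance_v1.MonitoringClient()
    end_time = timestamp_pb2.Timestamp()
    end_time.GetCurrentTime()
    request = cloudsecuritycompliance_v1.FetchFrameworkComplianceReportRequest(
        name=name,  # hypothetical framework compliance report resource name
        end_time=end_time,
    )
    return client.fetch_framework_compliance_report(request=request)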
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import interval_pb2 # type: ignore + +from google.cloud.cloudsecuritycompliance_v1.services.monitoring import ( + MonitoringAsyncClient, + MonitoringClient, + pagers, + transports, +) +from google.cloud.cloudsecuritycompliance_v1.types import common, monitoring + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MonitoringClient._get_default_mtls_endpoint(None) is None + assert ( + MonitoringClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + MonitoringClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + MonitoringClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + MonitoringClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert MonitoringClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert MonitoringClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MonitoringClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MonitoringClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + MonitoringClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MonitoringClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MonitoringClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MonitoringClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MonitoringClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MonitoringClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MonitoringClient._get_client_cert_source(None, False) is None + assert ( + MonitoringClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + MonitoringClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + MonitoringClient._get_client_cert_source(None, True) + is 
mock_default_cert_source + ) + assert ( + MonitoringClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + MonitoringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MonitoringClient), +) +@mock.patch.object( + MonitoringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MonitoringAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = MonitoringClient._DEFAULT_UNIVERSE + default_endpoint = MonitoringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MonitoringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + MonitoringClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + MonitoringClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == MonitoringClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MonitoringClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + MonitoringClient._get_api_endpoint(None, None, default_universe, "always") + == MonitoringClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MonitoringClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == MonitoringClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + MonitoringClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + MonitoringClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + MonitoringClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + MonitoringClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + MonitoringClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + MonitoringClient._get_universe_domain(None, None) + == MonitoringClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + MonitoringClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MonitoringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MonitoringClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MonitoringClient, "grpc"), + (MonitoringAsyncClient, "grpc_asyncio"), + (MonitoringClient, "rest"), + ], +) +def test_monitoring_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsecuritycompliance.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsecuritycompliance.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.MonitoringGrpcTransport, "grpc"), + (transports.MonitoringGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.MonitoringRestTransport, "rest"), + ], +) +def test_monitoring_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (MonitoringClient, "grpc"), + (MonitoringAsyncClient, "grpc_asyncio"), + (MonitoringClient, "rest"), + ], +) +def test_monitoring_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "cloudsecuritycompliance.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsecuritycompliance.googleapis.com" + ) + + +def test_monitoring_client_get_transport_class(): + transport = MonitoringClient.get_transport_class() + available_transports = [ + transports.MonitoringGrpcTransport, + transports.MonitoringRestTransport, + ] + assert transport in available_transports + + transport = MonitoringClient.get_transport_class("grpc") + assert transport == transports.MonitoringGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MonitoringClient, transports.MonitoringGrpcTransport, "grpc"), + ( + MonitoringAsyncClient, + transports.MonitoringGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MonitoringClient, transports.MonitoringRestTransport, "rest"), + ], +) +@mock.patch.object( + MonitoringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MonitoringClient), +) +@mock.patch.object( + MonitoringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MonitoringAsyncClient), +) +def test_monitoring_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MonitoringClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MonitoringClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (MonitoringClient, transports.MonitoringGrpcTransport, "grpc", "true"), + ( + MonitoringAsyncClient, + transports.MonitoringGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (MonitoringClient, transports.MonitoringGrpcTransport, "grpc", "false"), + ( + MonitoringAsyncClient, + transports.MonitoringGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (MonitoringClient, transports.MonitoringRestTransport, "rest", "true"), + (MonitoringClient, transports.MonitoringRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + MonitoringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MonitoringClient), +) +@mock.patch.object( + MonitoringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(MonitoringAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_monitoring_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [MonitoringClient, MonitoringAsyncClient]) +@mock.patch.object( + MonitoringClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MonitoringClient) +) +@mock.patch.object( + MonitoringAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MonitoringAsyncClient), +) +def test_monitoring_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [MonitoringClient, MonitoringAsyncClient]) +@mock.patch.object( + MonitoringClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MonitoringClient), +) +@mock.patch.object( + MonitoringAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(MonitoringAsyncClient), +) +def test_monitoring_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MonitoringClient._DEFAULT_UNIVERSE + default_endpoint = MonitoringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = MonitoringClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (MonitoringClient, transports.MonitoringGrpcTransport, "grpc"), + ( + MonitoringAsyncClient, + transports.MonitoringGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (MonitoringClient, transports.MonitoringRestTransport, "rest"), + ], +) +def test_monitoring_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MonitoringClient, transports.MonitoringGrpcTransport, "grpc", grpc_helpers), + ( + MonitoringAsyncClient, + transports.MonitoringGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (MonitoringClient, transports.MonitoringRestTransport, "rest", None), + ], +) +def test_monitoring_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_monitoring_client_client_options_from_dict(): + with mock.patch( + "google.cloud.cloudsecuritycompliance_v1.services.monitoring.transports.MonitoringGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = MonitoringClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (MonitoringClient, transports.MonitoringGrpcTransport, "grpc", grpc_helpers), + ( + MonitoringAsyncClient, + transports.MonitoringGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_monitoring_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudsecuritycompliance.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="cloudsecuritycompliance.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.ListFrameworkComplianceSummariesRequest, + dict, + ], +) +def test_list_framework_compliance_summaries(request_type, transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.ListFrameworkComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_framework_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = monitoring.ListFrameworkComplianceSummariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFrameworkComplianceSummariesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_framework_compliance_summaries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = monitoring.ListFrameworkComplianceSummariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_framework_compliance_summaries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == monitoring.ListFrameworkComplianceSummariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_framework_compliance_summaries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_framework_compliance_summaries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_framework_compliance_summaries + ] = mock_rpc + request = {} + client.list_framework_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_framework_compliance_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_framework_compliance_summaries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_framework_compliance_summaries + ] = mock_rpc + + request = {} + await client.list_framework_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_framework_compliance_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_async( + transport: str = "grpc_asyncio", + request_type=monitoring.ListFrameworkComplianceSummariesRequest, +): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFrameworkComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_framework_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = monitoring.ListFrameworkComplianceSummariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFrameworkComplianceSummariesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_async_from_dict(): + await test_list_framework_compliance_summaries_async(request_type=dict) + + +def test_list_framework_compliance_summaries_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.ListFrameworkComplianceSummariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + call.return_value = monitoring.ListFrameworkComplianceSummariesResponse() + client.list_framework_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.ListFrameworkComplianceSummariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFrameworkComplianceSummariesResponse() + ) + await client.list_framework_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_framework_compliance_summaries_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = monitoring.ListFrameworkComplianceSummariesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_framework_compliance_summaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_framework_compliance_summaries_flattened_error(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_framework_compliance_summaries( + monitoring.ListFrameworkComplianceSummariesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_flattened_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.ListFrameworkComplianceSummariesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFrameworkComplianceSummariesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_framework_compliance_summaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_flattened_error_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_framework_compliance_summaries( + monitoring.ListFrameworkComplianceSummariesRequest(), + parent="parent_value", + ) + + +def test_list_framework_compliance_summaries_pager(transport_name: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_framework_compliance_summaries( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, monitoring.FrameworkComplianceSummary) for i in results + ) + + +def test_list_framework_compliance_summaries_pages(transport_name: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + ), + RuntimeError, + ) + pages = list(client.list_framework_compliance_summaries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_async_pager(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_framework_compliance_summaries( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, monitoring.FrameworkComplianceSummary) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_async_pages(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_framework_compliance_summaries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.ListFindingSummariesRequest, + dict, + ], +) +def test_list_finding_summaries(request_type, transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = monitoring.ListFindingSummariesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_finding_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = monitoring.ListFindingSummariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFindingSummariesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_finding_summaries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = monitoring.ListFindingSummariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_finding_summaries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == monitoring.ListFindingSummariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_finding_summaries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_finding_summaries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_finding_summaries + ] = mock_rpc + request = {} + client.list_finding_summaries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_finding_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_finding_summaries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_finding_summaries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_finding_summaries + ] = mock_rpc + + request = {} + await client.list_finding_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_finding_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_finding_summaries_async( + transport: str = "grpc_asyncio", request_type=monitoring.ListFindingSummariesRequest +): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFindingSummariesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_finding_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = monitoring.ListFindingSummariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListFindingSummariesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_finding_summaries_async_from_dict(): + await test_list_finding_summaries_async(request_type=dict) + + +def test_list_finding_summaries_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.ListFindingSummariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + call.return_value = monitoring.ListFindingSummariesResponse() + client.list_finding_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_finding_summaries_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.ListFindingSummariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFindingSummariesResponse() + ) + await client.list_finding_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_finding_summaries_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.ListFindingSummariesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_finding_summaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_finding_summaries_flattened_error(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_finding_summaries( + monitoring.ListFindingSummariesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_finding_summaries_flattened_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.ListFindingSummariesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFindingSummariesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_finding_summaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_finding_summaries_flattened_error_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_finding_summaries( + monitoring.ListFindingSummariesRequest(), + parent="parent_value", + ) + + +def test_list_finding_summaries_pager(transport_name: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[], + next_page_token="def", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_finding_summaries(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, monitoring.FindingSummary) for i in results) + + +def test_list_finding_summaries_pages(transport_name: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + # Set the response to a series of pages. 
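+ # The trailing RuntimeError is a sentinel: the final page below has no next_page_token, so the pager must stop before it is ever raised.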
+ call.side_effect = ( + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[], + next_page_token="def", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + ), + RuntimeError, + ) + pages = list(client.list_finding_summaries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_finding_summaries_async_pager(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[], + next_page_token="def", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_finding_summaries( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, monitoring.FindingSummary) for i in responses) + + +@pytest.mark.asyncio +async def test_list_finding_summaries_async_pages(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[], + next_page_token="def", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_finding_summaries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.FetchFrameworkComplianceReportRequest, + dict, + ], +) +def test_fetch_framework_compliance_report(request_type, transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.FrameworkComplianceReport( + framework="framework_value", + framework_description="framework_description_value", + framework_type=common.Framework.FrameworkType.BUILT_IN, + supported_cloud_providers=[common.CloudProvider.AWS], + framework_categories=[common.FrameworkCategory.INDUSTRY_DEFINED_STANDARD], + framework_display_name="framework_display_name_value", + name="name_value", + major_revision_id=1811, + minor_revision_id=1823, + ) + response = client.fetch_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = monitoring.FetchFrameworkComplianceReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, monitoring.FrameworkComplianceReport) + assert response.framework == "framework_value" + assert response.framework_description == "framework_description_value" + assert response.framework_type == common.Framework.FrameworkType.BUILT_IN + assert response.supported_cloud_providers == [common.CloudProvider.AWS] + assert response.framework_categories == [ + common.FrameworkCategory.INDUSTRY_DEFINED_STANDARD + ] + assert response.framework_display_name == "framework_display_name_value" + assert response.name == "name_value" + assert response.major_revision_id == 1811 + assert response.minor_revision_id == 1823 + + +def test_fetch_framework_compliance_report_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = monitoring.FetchFrameworkComplianceReportRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.fetch_framework_compliance_report(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == monitoring.FetchFrameworkComplianceReportRequest( + name="name_value", + ) + + +def test_fetch_framework_compliance_report_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_framework_compliance_report + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_framework_compliance_report + ] = mock_rpc + request = {} + client.fetch_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.fetch_framework_compliance_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_framework_compliance_report_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.fetch_framework_compliance_report + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_framework_compliance_report + ] = mock_rpc + + request = {} + await client.fetch_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.fetch_framework_compliance_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_fetch_framework_compliance_report_async( + transport: str = "grpc_asyncio", + request_type=monitoring.FetchFrameworkComplianceReportRequest, +): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.FrameworkComplianceReport( + framework="framework_value", + framework_description="framework_description_value", + framework_type=common.Framework.FrameworkType.BUILT_IN, + supported_cloud_providers=[common.CloudProvider.AWS], + framework_categories=[ + common.FrameworkCategory.INDUSTRY_DEFINED_STANDARD + ], + framework_display_name="framework_display_name_value", + name="name_value", + major_revision_id=1811, + minor_revision_id=1823, + ) + ) + response = await client.fetch_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = monitoring.FetchFrameworkComplianceReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, monitoring.FrameworkComplianceReport) + assert response.framework == "framework_value" + assert response.framework_description == "framework_description_value" + assert response.framework_type == common.Framework.FrameworkType.BUILT_IN + assert response.supported_cloud_providers == [common.CloudProvider.AWS] + assert response.framework_categories == [ + common.FrameworkCategory.INDUSTRY_DEFINED_STANDARD + ] + assert response.framework_display_name == "framework_display_name_value" + assert response.name == "name_value" + assert response.major_revision_id == 1811 + assert response.minor_revision_id == 1823 + + +@pytest.mark.asyncio +async def test_fetch_framework_compliance_report_async_from_dict(): + await test_fetch_framework_compliance_report_async(request_type=dict) + + +def test_fetch_framework_compliance_report_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.FetchFrameworkComplianceReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + call.return_value = monitoring.FrameworkComplianceReport() + client.fetch_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
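+ # Each entry in mock_calls unpacks to (name, args, kwargs); the routing header travels in the metadata kwarg checked below.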
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_framework_compliance_report_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.FetchFrameworkComplianceReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.FrameworkComplianceReport() + ) + await client.fetch_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_fetch_framework_compliance_report_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.FrameworkComplianceReport() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_framework_compliance_report( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_fetch_framework_compliance_report_flattened_error(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.fetch_framework_compliance_report( + monitoring.FetchFrameworkComplianceReportRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_fetch_framework_compliance_report_flattened_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.FrameworkComplianceReport() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.FrameworkComplianceReport() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_framework_compliance_report( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_fetch_framework_compliance_report_flattened_error_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.fetch_framework_compliance_report( + monitoring.FetchFrameworkComplianceReportRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.ListControlComplianceSummariesRequest, + dict, + ], +) +def test_list_control_compliance_summaries(request_type, transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.ListControlComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_control_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = monitoring.ListControlComplianceSummariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlComplianceSummariesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_control_compliance_summaries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = monitoring.ListControlComplianceSummariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_control_compliance_summaries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == monitoring.ListControlComplianceSummariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_list_control_compliance_summaries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_control_compliance_summaries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_control_compliance_summaries + ] = mock_rpc + request = {} + client.list_control_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_control_compliance_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_control_compliance_summaries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_control_compliance_summaries + ] = mock_rpc + + request = {} + await client.list_control_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_control_compliance_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_async( + transport: str = "grpc_asyncio", + request_type=monitoring.ListControlComplianceSummariesRequest, +): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListControlComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_control_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = monitoring.ListControlComplianceSummariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListControlComplianceSummariesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_async_from_dict(): + await test_list_control_compliance_summaries_async(request_type=dict) + + +def test_list_control_compliance_summaries_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.ListControlComplianceSummariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + call.return_value = monitoring.ListControlComplianceSummariesResponse() + client.list_control_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.ListControlComplianceSummariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListControlComplianceSummariesResponse() + ) + await client.list_control_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_control_compliance_summaries_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = monitoring.ListControlComplianceSummariesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_control_compliance_summaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_control_compliance_summaries_flattened_error(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_control_compliance_summaries( + monitoring.ListControlComplianceSummariesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_flattened_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.ListControlComplianceSummariesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListControlComplianceSummariesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_control_compliance_summaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_flattened_error_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_control_compliance_summaries( + monitoring.ListControlComplianceSummariesRequest(), + parent="parent_value", + ) + + +def test_list_control_compliance_summaries_pager(transport_name: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_control_compliance_summaries( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, monitoring.ControlComplianceSummary) for i in results) + + +def test_list_control_compliance_summaries_pages(transport_name: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + ), + RuntimeError, + ) + pages = list(client.list_control_compliance_summaries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_async_pager(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_control_compliance_summaries( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, monitoring.ControlComplianceSummary) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_async_pages(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_control_compliance_summaries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.AggregateFrameworkComplianceReportRequest, + dict, + ], +) +def test_aggregate_framework_compliance_report(request_type, transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
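+ # No field-level assertions follow for this response type, so an empty message is sufficient here.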
+ call.return_value = monitoring.AggregateFrameworkComplianceReportResponse() + response = client.aggregate_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = monitoring.AggregateFrameworkComplianceReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, monitoring.AggregateFrameworkComplianceReportResponse) + + +def test_aggregate_framework_compliance_report_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = monitoring.AggregateFrameworkComplianceReportRequest( + name="name_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.aggregate_framework_compliance_report(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == monitoring.AggregateFrameworkComplianceReportRequest( + name="name_value", + filter="filter_value", + ) + + +def test_aggregate_framework_compliance_report_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.aggregate_framework_compliance_report + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.aggregate_framework_compliance_report + ] = mock_rpc + request = {} + client.aggregate_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregate_framework_compliance_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_aggregate_framework_compliance_report_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.aggregate_framework_compliance_report + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.aggregate_framework_compliance_report + ] = mock_rpc + + request = {} + await client.aggregate_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.aggregate_framework_compliance_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_aggregate_framework_compliance_report_async( + transport: str = "grpc_asyncio", + request_type=monitoring.AggregateFrameworkComplianceReportRequest, +): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.AggregateFrameworkComplianceReportResponse() + ) + response = await client.aggregate_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = monitoring.AggregateFrameworkComplianceReportRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, monitoring.AggregateFrameworkComplianceReportResponse) + + +@pytest.mark.asyncio +async def test_aggregate_framework_compliance_report_async_from_dict(): + await test_aggregate_framework_compliance_report_async(request_type=dict) + + +def test_aggregate_framework_compliance_report_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.AggregateFrameworkComplianceReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + call.return_value = monitoring.AggregateFrameworkComplianceReportResponse() + client.aggregate_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_aggregate_framework_compliance_report_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = monitoring.AggregateFrameworkComplianceReportRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.AggregateFrameworkComplianceReportResponse() + ) + await client.aggregate_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_aggregate_framework_compliance_report_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = monitoring.AggregateFrameworkComplianceReportResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.aggregate_framework_compliance_report( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_aggregate_framework_compliance_report_flattened_error(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregate_framework_compliance_report( + monitoring.AggregateFrameworkComplianceReportRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_aggregate_framework_compliance_report_flattened_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
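+ # The plain assignment below is immediately overwritten; only the FakeUnaryUnaryCall wrapper matters, since the async client awaits the mocked call.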
+ call.return_value = monitoring.AggregateFrameworkComplianceReportResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.AggregateFrameworkComplianceReportResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.aggregate_framework_compliance_report( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_aggregate_framework_compliance_report_flattened_error_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.aggregate_framework_compliance_report( + monitoring.AggregateFrameworkComplianceReportRequest(), + name="name_value", + ) + + +def test_list_framework_compliance_summaries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_framework_compliance_summaries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_framework_compliance_summaries + ] = mock_rpc + + request = {} + client.list_framework_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_framework_compliance_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_framework_compliance_summaries_rest_required_fields( + request_type=monitoring.ListFrameworkComplianceSummariesRequest, +): + transport_class = transports.MonitoringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_framework_compliance_summaries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_framework_compliance_summaries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
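+ # Any fields still reported as unset must be one of the optional query parameters; the required 'parent' path parameter was populated above.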
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = monitoring.ListFrameworkComplianceSummariesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.ListFrameworkComplianceSummariesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_framework_compliance_summaries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_framework_compliance_summaries_rest_unset_required_fields(): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_framework_compliance_summaries._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_framework_compliance_summaries_rest_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
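The use_cached_wrapped_rpc tests above rely on _prep_wrapped_messages building every wrapped RPC once at client construction and caching it on the transport. A sketch of that observation, mirroring the test's use of the private _transport attributes (an implementation detail, not public API), with the same assumed import paths:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.cloudsecuritycompliance_v1 import MonitoringClient  # assumed path

with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
    client = MonitoringClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    # Every RPC is wrapped once, up front, while the client is constructed ...
    assert wrapper_fn.call_count > 0
    # ... and the wrapped callable is cached on the transport, keyed by the
    # bound stub, so individual method calls never re-wrap it.
    assert (
        client._transport.list_framework_compliance_summaries
        in client._transport._wrapped_methods
    )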
+ return_value = monitoring.ListFrameworkComplianceSummariesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = monitoring.ListFrameworkComplianceSummariesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_framework_compliance_summaries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/frameworkComplianceSummaries" + % client.transport._host, + args[1], + ) + + +def test_list_framework_compliance_summaries_rest_flattened_error( + transport: str = "rest", +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_framework_compliance_summaries( + monitoring.ListFrameworkComplianceSummariesRequest(), + parent="parent_value", + ) + + +def test_list_framework_compliance_summaries_rest_pager(transport: str = "rest"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFrameworkComplianceSummariesResponse( + framework_compliance_summaries=[ + monitoring.FrameworkComplianceSummary(), + monitoring.FrameworkComplianceSummary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + monitoring.ListFrameworkComplianceSummariesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_framework_compliance_summaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, monitoring.FrameworkComplianceSummary) for i in results + ) + + pages = list( + client.list_framework_compliance_summaries(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_finding_summaries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_finding_summaries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_finding_summaries + ] = mock_rpc + + request = {} + client.list_finding_summaries(request) + + # Establish that the underlying gRPC stub method was called. 
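The rest_pager test above splices several pages into a single iteration. The same paging behaviour can be sketched against a mocked gRPC stub, which keeps the example offline; imports are assumed as in the earlier sketches:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.cloudsecuritycompliance_v1 import MonitoringClient  # assumed path
from google.cloud.cloudsecuritycompliance_v1.types import monitoring  # assumed path

client = MonitoringClient(credentials=ga_credentials.AnonymousCredentials())

pages = (
    monitoring.ListFrameworkComplianceSummariesResponse(
        framework_compliance_summaries=[monitoring.FrameworkComplianceSummary()],
        next_page_token="abc",
    ),
    monitoring.ListFrameworkComplianceSummariesResponse(
        framework_compliance_summaries=[monitoring.FrameworkComplianceSummary()],
    ),
)

with mock.patch.object(
    type(client.transport.list_framework_compliance_summaries), "__call__"
) as call:
    call.side_effect = pages
    pager = client.list_framework_compliance_summaries(
        parent="organizations/sample1/locations/sample2"
    )
    # Iterating the pager follows next_page_token across pages transparently.
    assert len(list(pager)) == 2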
+ assert mock_rpc.call_count == 1 + + client.list_finding_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_finding_summaries_rest_required_fields( + request_type=monitoring.ListFindingSummariesRequest, +): + transport_class = transports.MonitoringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_finding_summaries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_finding_summaries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "end_time", + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = monitoring.ListFindingSummariesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.ListFindingSummariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_finding_summaries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_finding_summaries_rest_unset_required_fields(): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_finding_summaries._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "endTime", + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_finding_summaries_rest_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.ListFindingSummariesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = monitoring.ListFindingSummariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_finding_summaries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/findingSummaries" + % client.transport._host, + args[1], + ) + + +def test_list_finding_summaries_rest_flattened_error(transport: str = "rest"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_finding_summaries( + monitoring.ListFindingSummariesRequest(), + parent="parent_value", + ) + + +def test_list_finding_summaries_rest_pager(transport: str = "rest"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
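The rest_flattened tests above check the transcoded URL against the http rule's path template with path_template.validate. For illustration only, the companion expand() helper in google.api_core.path_template shows how a resource name is bound into such a template; this is a sketch of the binding, not what the generated transport executes internally:

from google.api_core import path_template

# Template taken from the http rule validated above.
template = "v1/{parent=organizations/*/locations/*}/findingSummaries"
url = path_template.expand(
    template, parent="organizations/sample1/locations/sample2"
)
# -> "v1/organizations/sample1/locations/sample2/findingSummaries"
assert path_template.validate(template, url)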
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + next_page_token="abc", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[], + next_page_token="def", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListFindingSummariesResponse( + finding_summaries=[ + monitoring.FindingSummary(), + monitoring.FindingSummary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + monitoring.ListFindingSummariesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_finding_summaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, monitoring.FindingSummary) for i in results) + + pages = list(client.list_finding_summaries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_fetch_framework_compliance_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_framework_compliance_report + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_framework_compliance_report + ] = mock_rpc + + request = {} + client.fetch_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_framework_compliance_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_framework_compliance_report_rest_required_fields( + request_type=monitoring.FetchFrameworkComplianceReportRequest, +): + transport_class = transports.MonitoringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_framework_compliance_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_framework_compliance_report._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("end_time",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = monitoring.FrameworkComplianceReport() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.FrameworkComplianceReport.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.fetch_framework_compliance_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_framework_compliance_report_rest_unset_required_fields(): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.fetch_framework_compliance_report._get_unset_required_fields({}) + ) + assert set(unset_fields) == (set(("endTime",)) & set(("name",))) + + +def test_fetch_framework_compliance_report_rest_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.FrameworkComplianceReport() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = monitoring.FrameworkComplianceReport.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.fetch_framework_compliance_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/frameworkComplianceReports/*}:fetch" + % client.transport._host, + args[1], + ) + + +def test_fetch_framework_compliance_report_rest_flattened_error( + transport: str = "rest", +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
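The fetch_framework_compliance_report tests above drive the custom :fetch verb keyed by the report's resource name. A minimal offline sketch of the call shape, again with the transport mocked and the import paths assumed:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.cloudsecuritycompliance_v1 import MonitoringClient  # assumed path
from google.cloud.cloudsecuritycompliance_v1.types import monitoring  # assumed path

client = MonitoringClient(credentials=ga_credentials.AnonymousCredentials())

name = "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3"
with mock.patch.object(
    type(client.transport.fetch_framework_compliance_report), "__call__"
) as call:
    call.return_value = monitoring.FrameworkComplianceReport(name=name)
    report = client.fetch_framework_compliance_report(name=name)

assert report.name == name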
+ with pytest.raises(ValueError): + client.fetch_framework_compliance_report( + monitoring.FetchFrameworkComplianceReportRequest(), + name="name_value", + ) + + +def test_list_control_compliance_summaries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_control_compliance_summaries + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_control_compliance_summaries + ] = mock_rpc + + request = {} + client.list_control_compliance_summaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_control_compliance_summaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_control_compliance_summaries_rest_required_fields( + request_type=monitoring.ListControlComplianceSummariesRequest, +): + transport_class = transports.MonitoringRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_control_compliance_summaries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_control_compliance_summaries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "end_time", + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = monitoring.ListControlComplianceSummariesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.ListControlComplianceSummariesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_control_compliance_summaries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_control_compliance_summaries_rest_unset_required_fields(): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.list_control_compliance_summaries._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "endTime", + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_control_compliance_summaries_rest_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.ListControlComplianceSummariesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = monitoring.ListControlComplianceSummariesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_control_compliance_summaries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*/frameworkComplianceReports/*}/controlComplianceSummaries" + % client.transport._host, + args[1], + ) + + +def test_list_control_compliance_summaries_rest_flattened_error( + transport: str = "rest", +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_control_compliance_summaries( + monitoring.ListControlComplianceSummariesRequest(), + parent="parent_value", + ) + + +def test_list_control_compliance_summaries_rest_pager(transport: str = "rest"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + next_page_token="abc", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[], + next_page_token="def", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + ], + next_page_token="ghi", + ), + monitoring.ListControlComplianceSummariesResponse( + control_compliance_summaries=[ + monitoring.ControlComplianceSummary(), + monitoring.ControlComplianceSummary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + monitoring.ListControlComplianceSummariesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + + pager = client.list_control_compliance_summaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, monitoring.ControlComplianceSummary) for i in results) + + pages = list( + client.list_control_compliance_summaries(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_aggregate_framework_compliance_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.aggregate_framework_compliance_report + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.aggregate_framework_compliance_report + ] = mock_rpc + + request = {} + client.aggregate_framework_compliance_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.aggregate_framework_compliance_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_aggregate_framework_compliance_report_rest_required_fields( + request_type=monitoring.AggregateFrameworkComplianceReportRequest, +): + transport_class = transports.MonitoringRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregate_framework_compliance_report._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregate_framework_compliance_report._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "interval", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = monitoring.AggregateFrameworkComplianceReportResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.AggregateFrameworkComplianceReportResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.aggregate_framework_compliance_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_aggregate_framework_compliance_report_rest_unset_required_fields(): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = ( + transport.aggregate_framework_compliance_report._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "interval", + ) + ) + & set(("name",)) + ) + + +def test_aggregate_framework_compliance_report_rest_flattened(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.AggregateFrameworkComplianceReportResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = monitoring.AggregateFrameworkComplianceReportResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.aggregate_framework_compliance_report(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/frameworkComplianceReports/*}:aggregate" + % client.transport._host, + args[1], + ) + + +def test_aggregate_framework_compliance_report_rest_flattened_error( + transport: str = "rest", +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregate_framework_compliance_report( + monitoring.AggregateFrameworkComplianceReportRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.MonitoringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MonitoringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MonitoringClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MonitoringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MonitoringClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MonitoringClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MonitoringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MonitoringClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MonitoringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MonitoringClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MonitoringGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MonitoringGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MonitoringGrpcTransport, + transports.MonitoringGrpcAsyncIOTransport, + transports.MonitoringRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = MonitoringClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_framework_compliance_summaries_empty_call_grpc(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
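test_credentials_transport_error above spells out the rule that credentials, credentials_file, api_key, and scopes may only be given to the client when the client also owns the transport. A sketch of the supported shapes, assuming the standard services.monitoring.transports module of the generated package:

from google.auth import credentials as ga_credentials
from google.cloud.cloudsecuritycompliance_v1 import MonitoringClient  # assumed path
from google.cloud.cloudsecuritycompliance_v1.services.monitoring import transports  # assumed path

# The transport owns the channel and its credentials ...
transport = transports.MonitoringGrpcTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
# ... so the client is handed the transport and nothing else.
client = MonitoringClient(transport=transport)
assert client.transport is transport

# Supplying credentials (or a credentials_file, api_key, or scopes) alongside a
# transport instance is rejected, exactly as the tests above assert.
try:
    MonitoringClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport
    )
except ValueError:
    pass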
+ with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + call.return_value = monitoring.ListFrameworkComplianceSummariesResponse() + client.list_framework_compliance_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListFrameworkComplianceSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_finding_summaries_empty_call_grpc(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + call.return_value = monitoring.ListFindingSummariesResponse() + client.list_finding_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListFindingSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_framework_compliance_report_empty_call_grpc(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + call.return_value = monitoring.FrameworkComplianceReport() + client.fetch_framework_compliance_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.FetchFrameworkComplianceReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_control_compliance_summaries_empty_call_grpc(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + call.return_value = monitoring.ListControlComplianceSummariesResponse() + client.list_control_compliance_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListControlComplianceSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_aggregate_framework_compliance_report_empty_call_grpc(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + call.return_value = monitoring.AggregateFrameworkComplianceReportResponse() + client.aggregate_framework_compliance_report(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.AggregateFrameworkComplianceReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = MonitoringAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_framework_compliance_summaries_empty_call_grpc_asyncio(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFrameworkComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_framework_compliance_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListFrameworkComplianceSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_finding_summaries_empty_call_grpc_asyncio(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListFindingSummariesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_finding_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListFindingSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_fetch_framework_compliance_report_empty_call_grpc_asyncio(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.FrameworkComplianceReport( + framework="framework_value", + framework_description="framework_description_value", + framework_type=common.Framework.FrameworkType.BUILT_IN, + supported_cloud_providers=[common.CloudProvider.AWS], + framework_categories=[ + common.FrameworkCategory.INDUSTRY_DEFINED_STANDARD + ], + framework_display_name="framework_display_name_value", + name="name_value", + major_revision_id=1811, + minor_revision_id=1823, + ) + ) + await client.fetch_framework_compliance_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.FetchFrameworkComplianceReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_control_compliance_summaries_empty_call_grpc_asyncio(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.ListControlComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_control_compliance_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListControlComplianceSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_aggregate_framework_compliance_report_empty_call_grpc_asyncio(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + monitoring.AggregateFrameworkComplianceReportResponse() + ) + await client.aggregate_framework_compliance_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.AggregateFrameworkComplianceReportRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = MonitoringClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_framework_compliance_summaries_rest_bad_request( + request_type=monitoring.ListFrameworkComplianceSummariesRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_framework_compliance_summaries(request) + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.ListFrameworkComplianceSummariesRequest, + dict, + ], +) +def test_list_framework_compliance_summaries_rest_call_success(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.ListFrameworkComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.ListFrameworkComplianceSummariesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_framework_compliance_summaries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListFrameworkComplianceSummariesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_framework_compliance_summaries_rest_interceptors(null_interceptor): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MonitoringRestInterceptor(), + ) + client = MonitoringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MonitoringRestInterceptor, "post_list_framework_compliance_summaries" + ) as post, mock.patch.object( + transports.MonitoringRestInterceptor, + "post_list_framework_compliance_summaries_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MonitoringRestInterceptor, "pre_list_framework_compliance_summaries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = monitoring.ListFrameworkComplianceSummariesRequest.pb( + monitoring.ListFrameworkComplianceSummariesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = monitoring.ListFrameworkComplianceSummariesResponse.to_json( + monitoring.ListFrameworkComplianceSummariesResponse() + ) + req.return_value.content = return_value + + request = monitoring.ListFrameworkComplianceSummariesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = monitoring.ListFrameworkComplianceSummariesResponse() + post_with_metadata.return_value = ( + monitoring.ListFrameworkComplianceSummariesResponse(), + metadata, + ) + + client.list_framework_compliance_summaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_finding_summaries_rest_bad_request( + request_type=monitoring.ListFindingSummariesRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
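The interceptor test above wires pre/post hooks into the REST transport via its interceptor argument. A minimal custom interceptor might look like the following sketch; the hook names come straight from the test, the metadata entry is a hypothetical example, and the transports import path is assumed as before:

from google.auth import credentials as ga_credentials
from google.cloud.cloudsecuritycompliance_v1 import MonitoringClient  # assumed path
from google.cloud.cloudsecuritycompliance_v1.services.monitoring import transports  # assumed path


class LoggingInterceptor(transports.MonitoringRestInterceptor):
    """Adjusts metadata before the call and inspects the response after it."""

    def pre_list_framework_compliance_summaries(self, request, metadata):
        # Hypothetical extra metadata entry, added purely for illustration.
        metadata = list(metadata) + [("x-example-caller", "docs-sketch")]
        return request, metadata

    def post_list_framework_compliance_summaries(self, response):
        # e.g. log response.next_page_token here before handing it back
        return response


transport = transports.MonitoringRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
client = MonitoringClient(transport=transport)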
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_finding_summaries(request) + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.ListFindingSummariesRequest, + dict, + ], +) +def test_list_finding_summaries_rest_call_success(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.ListFindingSummariesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.ListFindingSummariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_finding_summaries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListFindingSummariesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_finding_summaries_rest_interceptors(null_interceptor): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MonitoringRestInterceptor(), + ) + client = MonitoringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MonitoringRestInterceptor, "post_list_finding_summaries" + ) as post, mock.patch.object( + transports.MonitoringRestInterceptor, + "post_list_finding_summaries_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MonitoringRestInterceptor, "pre_list_finding_summaries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = monitoring.ListFindingSummariesRequest.pb( + monitoring.ListFindingSummariesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = monitoring.ListFindingSummariesResponse.to_json( + monitoring.ListFindingSummariesResponse() + ) + req.return_value.content = return_value + + request = monitoring.ListFindingSummariesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = monitoring.ListFindingSummariesResponse() + post_with_metadata.return_value = ( + monitoring.ListFindingSummariesResponse(), + metadata, + ) + + client.list_finding_summaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_fetch_framework_compliance_report_rest_bad_request( + request_type=monitoring.FetchFrameworkComplianceReportRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.fetch_framework_compliance_report(request) + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.FetchFrameworkComplianceReportRequest, + dict, + ], +) +def test_fetch_framework_compliance_report_rest_call_success(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.FrameworkComplianceReport( + framework="framework_value", + framework_description="framework_description_value", + framework_type=common.Framework.FrameworkType.BUILT_IN, + supported_cloud_providers=[common.CloudProvider.AWS], + framework_categories=[common.FrameworkCategory.INDUSTRY_DEFINED_STANDARD], + framework_display_name="framework_display_name_value", + name="name_value", + major_revision_id=1811, + minor_revision_id=1823, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.FrameworkComplianceReport.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.fetch_framework_compliance_report(request) + + # Establish that the response is the type that we expect. 
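+ # Every populated field, including enum and repeated values, is compared to confirm
+ # the JSON payload round-tripped into the proto response intact.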
+ assert isinstance(response, monitoring.FrameworkComplianceReport) + assert response.framework == "framework_value" + assert response.framework_description == "framework_description_value" + assert response.framework_type == common.Framework.FrameworkType.BUILT_IN + assert response.supported_cloud_providers == [common.CloudProvider.AWS] + assert response.framework_categories == [ + common.FrameworkCategory.INDUSTRY_DEFINED_STANDARD + ] + assert response.framework_display_name == "framework_display_name_value" + assert response.name == "name_value" + assert response.major_revision_id == 1811 + assert response.minor_revision_id == 1823 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_framework_compliance_report_rest_interceptors(null_interceptor): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MonitoringRestInterceptor(), + ) + client = MonitoringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MonitoringRestInterceptor, "post_fetch_framework_compliance_report" + ) as post, mock.patch.object( + transports.MonitoringRestInterceptor, + "post_fetch_framework_compliance_report_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MonitoringRestInterceptor, "pre_fetch_framework_compliance_report" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = monitoring.FetchFrameworkComplianceReportRequest.pb( + monitoring.FetchFrameworkComplianceReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = monitoring.FrameworkComplianceReport.to_json( + monitoring.FrameworkComplianceReport() + ) + req.return_value.content = return_value + + request = monitoring.FetchFrameworkComplianceReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = monitoring.FrameworkComplianceReport() + post_with_metadata.return_value = ( + monitoring.FrameworkComplianceReport(), + metadata, + ) + + client.fetch_framework_compliance_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_control_compliance_summaries_rest_bad_request( + request_type=monitoring.ListControlComplianceSummariesRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_control_compliance_summaries(request) + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.ListControlComplianceSummariesRequest, + dict, + ], +) +def test_list_control_compliance_summaries_rest_call_success(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.ListControlComplianceSummariesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.ListControlComplianceSummariesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_control_compliance_summaries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListControlComplianceSummariesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_control_compliance_summaries_rest_interceptors(null_interceptor): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MonitoringRestInterceptor(), + ) + client = MonitoringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MonitoringRestInterceptor, "post_list_control_compliance_summaries" + ) as post, mock.patch.object( + transports.MonitoringRestInterceptor, + "post_list_control_compliance_summaries_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MonitoringRestInterceptor, "pre_list_control_compliance_summaries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = monitoring.ListControlComplianceSummariesRequest.pb( + monitoring.ListControlComplianceSummariesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = monitoring.ListControlComplianceSummariesResponse.to_json( + monitoring.ListControlComplianceSummariesResponse() + ) + req.return_value.content = return_value + + request = monitoring.ListControlComplianceSummariesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = monitoring.ListControlComplianceSummariesResponse() + post_with_metadata.return_value = ( + monitoring.ListControlComplianceSummariesResponse(), + metadata, + ) + + client.list_control_compliance_summaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_aggregate_framework_compliance_report_rest_bad_request( + request_type=monitoring.AggregateFrameworkComplianceReportRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.aggregate_framework_compliance_report(request) + + +@pytest.mark.parametrize( + "request_type", + [ + monitoring.AggregateFrameworkComplianceReportRequest, + dict, + ], +) +def test_aggregate_framework_compliance_report_rest_call_success(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/frameworkComplianceReports/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = monitoring.AggregateFrameworkComplianceReportResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = monitoring.AggregateFrameworkComplianceReportResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.aggregate_framework_compliance_report(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, monitoring.AggregateFrameworkComplianceReportResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_aggregate_framework_compliance_report_rest_interceptors(null_interceptor): + transport = transports.MonitoringRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.MonitoringRestInterceptor(), + ) + client = MonitoringClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.MonitoringRestInterceptor, + "post_aggregate_framework_compliance_report", + ) as post, mock.patch.object( + transports.MonitoringRestInterceptor, + "post_aggregate_framework_compliance_report_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.MonitoringRestInterceptor, + "pre_aggregate_framework_compliance_report", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = monitoring.AggregateFrameworkComplianceReportRequest.pb( + monitoring.AggregateFrameworkComplianceReportRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = monitoring.AggregateFrameworkComplianceReportResponse.to_json( + monitoring.AggregateFrameworkComplianceReportResponse() + ) + req.return_value.content = return_value + + request = monitoring.AggregateFrameworkComplianceReportRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = monitoring.AggregateFrameworkComplianceReportResponse() + post_with_metadata.return_value = ( + monitoring.AggregateFrameworkComplianceReportResponse(), + metadata, + ) + + client.aggregate_framework_compliance_report( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "organizations/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "organizations/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "name": "organizations/sample1/locations/sample2/operations/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "name": "organizations/sample1/locations/sample2/operations/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "name": "organizations/sample1/locations/sample2/operations/sample3" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "organizations/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_framework_compliance_summaries_empty_call_rest(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_framework_compliance_summaries), "__call__" + ) as call: + client.list_framework_compliance_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListFrameworkComplianceSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_finding_summaries_empty_call_rest(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_finding_summaries), "__call__" + ) as call: + client.list_finding_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListFindingSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
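+# The transport method itself is patched, so no HTTP request is issued; only the
+# default request message passed to the stub is verified.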
+def test_fetch_framework_compliance_report_empty_call_rest(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_framework_compliance_report), "__call__" + ) as call: + client.fetch_framework_compliance_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.FetchFrameworkComplianceReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_control_compliance_summaries_empty_call_rest(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_control_compliance_summaries), "__call__" + ) as call: + client.list_control_compliance_summaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.ListControlComplianceSummariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_aggregate_framework_compliance_report_empty_call_rest(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.aggregate_framework_compliance_report), "__call__" + ) as call: + client.aggregate_framework_compliance_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = monitoring.AggregateFrameworkComplianceReportRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MonitoringGrpcTransport, + ) + + +def test_monitoring_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MonitoringTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_monitoring_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.cloudsecuritycompliance_v1.services.monitoring.transports.MonitoringTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.MonitoringTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
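+ # The abstract base transport exposes every Monitoring RPC plus the
+ # location/operation mixin methods, none of which it implements itself.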
+ methods = ( + "list_framework_compliance_summaries", + "list_finding_summaries", + "fetch_framework_compliance_report", + "list_control_compliance_summaries", + "aggregate_framework_compliance_report", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_monitoring_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.cloudsecuritycompliance_v1.services.monitoring.transports.MonitoringTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MonitoringTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_monitoring_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.cloudsecuritycompliance_v1.services.monitoring.transports.MonitoringTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MonitoringTransport() + adc.assert_called_once() + + +def test_monitoring_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MonitoringClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MonitoringGrpcTransport, + transports.MonitoringGrpcAsyncIOTransport, + ], +) +def test_monitoring_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
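+ # Patching google.auth.default lets the test capture the scopes and quota project
+ # the transport forwards when it falls back to ADC.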
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MonitoringGrpcTransport, + transports.MonitoringGrpcAsyncIOTransport, + transports.MonitoringRestTransport, + ], +) +def test_monitoring_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MonitoringGrpcTransport, grpc_helpers), + (transports.MonitoringGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_monitoring_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "cloudsecuritycompliance.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="cloudsecuritycompliance.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.MonitoringGrpcTransport, transports.MonitoringGrpcAsyncIOTransport], +) +def test_monitoring_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
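+ # create_channel is stubbed out so the assertion can focus on the certificate/key
+ # pair handed to grpc.ssl_channel_credentials by the mTLS callback.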
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_monitoring_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.MonitoringRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_monitoring_host_no_port(transport_name): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudsecuritycompliance.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudsecuritycompliance.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsecuritycompliance.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_monitoring_host_with_port(transport_name): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="cloudsecuritycompliance.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "cloudsecuritycompliance.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudsecuritycompliance.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_monitoring_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MonitoringClient( + credentials=creds1, + transport=transport_name, + ) + client2 = MonitoringClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_framework_compliance_summaries._session + session2 = client2.transport.list_framework_compliance_summaries._session + assert session1 != session2 + session1 = client1.transport.list_finding_summaries._session + session2 = client2.transport.list_finding_summaries._session + assert session1 != session2 + session1 = client1.transport.fetch_framework_compliance_report._session + session2 = client2.transport.fetch_framework_compliance_report._session + assert session1 != session2 + session1 = client1.transport.list_control_compliance_summaries._session + session2 = client2.transport.list_control_compliance_summaries._session + assert session1 != session2 + session1 = client1.transport.aggregate_framework_compliance_report._session + session2 = client2.transport.aggregate_framework_compliance_report._session + assert session1 != session2 + + +def test_monitoring_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
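+ # A caller-supplied channel should be adopted as-is, so the transport must not
+ # create its own SSL channel credentials.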
+ transport = transports.MonitoringGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_monitoring_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MonitoringGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.MonitoringGrpcTransport, transports.MonitoringGrpcAsyncIOTransport], +) +def test_monitoring_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize( + "transport_class", + [transports.MonitoringGrpcTransport, transports.MonitoringGrpcAsyncIOTransport], +) +def test_monitoring_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_control_compliance_summary_path(): + project = "squid" + location = "clam" + framework_compliance_report = "whelk" + control_compliance_summary = "octopus" + expected = "projects/{project}/locations/{location}/frameworkComplianceReports/{framework_compliance_report}/controlComplianceSummaries/{control_compliance_summary}".format( + project=project, + location=location, + framework_compliance_report=framework_compliance_report, + control_compliance_summary=control_compliance_summary, + ) + actual = MonitoringClient.control_compliance_summary_path( + project, location, framework_compliance_report, control_compliance_summary + ) + assert expected == actual + + +def test_parse_control_compliance_summary_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "framework_compliance_report": "cuttlefish", + "control_compliance_summary": "mussel", + } + path = MonitoringClient.control_compliance_summary_path(**expected) + + # Check that the path construction is reversible. + actual = MonitoringClient.parse_control_compliance_summary_path(path) + assert expected == actual + + +def test_finding_summary_path(): + project = "winkle" + location = "nautilus" + finding_summary = "scallop" + expected = "projects/{project}/locations/{location}/findingSummaries/{finding_summary}".format( + project=project, + location=location, + finding_summary=finding_summary, + ) + actual = MonitoringClient.finding_summary_path(project, location, finding_summary) + assert expected == actual + + +def test_parse_finding_summary_path(): + expected = { + "project": "abalone", + "location": "squid", + "finding_summary": "clam", + } + path = MonitoringClient.finding_summary_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MonitoringClient.parse_finding_summary_path(path) + assert expected == actual + + +def test_framework_compliance_report_path(): + project = "whelk" + location = "octopus" + framework_compliance_report = "oyster" + expected = "projects/{project}/locations/{location}/frameworkComplianceReports/{framework_compliance_report}".format( + project=project, + location=location, + framework_compliance_report=framework_compliance_report, + ) + actual = MonitoringClient.framework_compliance_report_path( + project, location, framework_compliance_report + ) + assert expected == actual + + +def test_parse_framework_compliance_report_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "framework_compliance_report": "mussel", + } + path = MonitoringClient.framework_compliance_report_path(**expected) + + # Check that the path construction is reversible. + actual = MonitoringClient.parse_framework_compliance_report_path(path) + assert expected == actual + + +def test_framework_compliance_summary_path(): + project = "winkle" + location = "nautilus" + framework_compliance_summary = "scallop" + expected = "projects/{project}/locations/{location}/frameworkComplianceSummaries/{framework_compliance_summary}".format( + project=project, + location=location, + framework_compliance_summary=framework_compliance_summary, + ) + actual = MonitoringClient.framework_compliance_summary_path( + project, location, framework_compliance_summary + ) + assert expected == actual + + +def test_parse_framework_compliance_summary_path(): + expected = { + "project": "abalone", + "location": "squid", + "framework_compliance_summary": "clam", + } + path = MonitoringClient.framework_compliance_summary_path(**expected) + + # Check that the path construction is reversible. + actual = MonitoringClient.parse_framework_compliance_summary_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = MonitoringClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = MonitoringClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MonitoringClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = MonitoringClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = MonitoringClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MonitoringClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = MonitoringClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = MonitoringClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MonitoringClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format( + project=project, + ) + actual = MonitoringClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = MonitoringClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MonitoringClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = MonitoringClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = MonitoringClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MonitoringClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.MonitoringTransport, "_prep_wrapped_messages" + ) as prep: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.MonitoringTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = MonitoringClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
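+ # FakeUnaryUnaryCall wraps the value in an awaitable so the mocked async stub
+ # behaves like a real gRPC call object.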
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = MonitoringClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = MonitoringAsyncClient(credentials=async_anonymous_credentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = MonitoringAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = MonitoringClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MonitoringClient, transports.MonitoringGrpcTransport), + (MonitoringAsyncClient, transports.MonitoringGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index 63dedd94ac1e..accb14656b9a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -1727,6 +1727,7 @@ class ListEntriesRequest(proto.Message): - entry_type - entry_source.display_name + - parent_entry The comparison operators are =, !=, <, >, <=, >=. The service compares strings according to lexical order. @@ -1734,8 +1735,12 @@ class ListEntriesRequest(proto.Message): You can use the logical operators AND, OR, NOT in the filter. - You can use Wildcard "\*", but for entry_type you need to - provide the full project id or number. + You can use Wildcard "\*", but for entry_type and + parent_entry you need to provide the full project id or + number. + + You cannot use parent_entry in conjunction with other + fields. Example filter expressions: @@ -1743,7 +1748,8 @@ class ListEntriesRequest(proto.Message): - "entry_type=projects/example-project/locations/global/entryTypes/example-entry_type" - "entry_type=projects/example-project/locations/us/entryTypes/a\* OR entry_type=projects/another-project/locations/\*" - - "NOT entry_source.display_name=AnotherExampleDisplayName". + - "NOT entry_source.display_name=AnotherExampleDisplayName" + - "parent_entry=projects/example-project/locations/us/entryGroups/example-entry-group/entries/example-entry". """ parent: str = proto.Field( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py index 9c254e7f8f9c..f70fb4d3a871 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py @@ -19,7 +19,7 @@ import proto # type: ignore -from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import datascans_common, processing __protobuf__ = proto.module( package="google.cloud.dataplex.v1", @@ -64,6 +64,10 @@ class DataProfileSpec(proto.Message): If specified, the fields will be excluded from data profile, regardless of ``include_fields`` value. 
+ catalog_publishing_enabled (bool): + Optional. If set, the latest DataScan job + result will be published as Dataplex Universal + Catalog metadata. """ class PostScanActions(proto.Message): @@ -141,6 +145,10 @@ class SelectedFields(proto.Message): number=6, message=SelectedFields, ) + catalog_publishing_enabled: bool = proto.Field( + proto.BOOL, + number=8, + ) class DataProfileResult(proto.Message): @@ -158,6 +166,10 @@ class DataProfileResult(proto.Message): result. post_scan_actions_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult): Output only. The result of post scan actions. + catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): + Output only. The status of publishing the + data scan as Dataplex Universal Catalog + metadata. """ class Profile(proto.Message): @@ -545,6 +557,13 @@ class State(proto.Enum): number=6, message=PostScanActionsResult, ) + catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = ( + proto.Field( + proto.MESSAGE, + number=7, + message=datascans_common.DataScanCatalogPublishingStatus, + ) + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index ecb5eb2e9892..bada8c2eb264 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -7133,6 +7133,7 @@ def test_create_data_scan_rest_call_success(request_type): "field_names": ["field_names_value1", "field_names_value2"] }, "exclude_fields": {}, + "catalog_publishing_enabled": True, }, "data_discovery_spec": { "bigquery_publishing_config": { @@ -7250,6 +7251,7 @@ def test_create_data_scan_rest_call_success(request_type): "post_scan_actions_result": { "bigquery_export_result": {"state": 1, "message": "message_value"} }, + "catalog_publishing_status": {}, }, "data_discovery_result": { "bigquery_publishing": { @@ -7558,6 +7560,7 @@ def test_update_data_scan_rest_call_success(request_type): "field_names": ["field_names_value1", "field_names_value2"] }, "exclude_fields": {}, + "catalog_publishing_enabled": True, }, "data_discovery_spec": { "bigquery_publishing_config": { @@ -7675,6 +7678,7 @@ def test_update_data_scan_rest_call_success(request_type): "post_scan_actions_result": { "bigquery_export_result": {"state": 1, "message": "message_value"} }, + "catalog_publishing_status": {}, }, "data_discovery_result": { "bigquery_publishing": { diff --git a/packages/google-cloud-network-management/docs/network_management_v1/organization_vpc_flow_logs_service.rst b/packages/google-cloud-network-management/docs/network_management_v1/organization_vpc_flow_logs_service.rst new file mode 100644 index 000000000000..ad70284c237f --- /dev/null +++ b/packages/google-cloud-network-management/docs/network_management_v1/organization_vpc_flow_logs_service.rst @@ -0,0 +1,10 @@ +OrganizationVpcFlowLogsService +------------------------------------------------ + +.. automodule:: google.cloud.network_management_v1.services.organization_vpc_flow_logs_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-network-management/docs/network_management_v1/services_.rst b/packages/google-cloud-network-management/docs/network_management_v1/services_.rst index fc797859cd09..7b050e681b50 100644 --- a/packages/google-cloud-network-management/docs/network_management_v1/services_.rst +++ b/packages/google-cloud-network-management/docs/network_management_v1/services_.rst @@ -3,5 +3,6 @@ Services for Google Cloud Network Management v1 API .. toctree:: :maxdepth: 2 + organization_vpc_flow_logs_service reachability_service vpc_flow_logs_service diff --git a/packages/google-cloud-network-management/google/cloud/network_management/__init__.py b/packages/google-cloud-network-management/google/cloud/network_management/__init__.py index 17b37d168cb1..dd26414a5111 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management/__init__.py +++ b/packages/google-cloud-network-management/google/cloud/network_management/__init__.py @@ -18,6 +18,12 @@ __version__ = package_version.__version__ +from google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.async_client import ( + OrganizationVpcFlowLogsServiceAsyncClient, +) +from google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.client import ( + OrganizationVpcFlowLogsServiceClient, +) from google.cloud.network_management_v1.services.reachability_service.async_client import ( ReachabilityServiceAsyncClient, ) @@ -63,7 +69,9 @@ ForwardingRuleInfo, GKEMasterInfo, GoogleServiceInfo, + HybridSubnetInfo, InstanceInfo, + InterconnectAttachmentInfo, LoadBalancerBackend, LoadBalancerBackendInfo, LoadBalancerInfo, @@ -89,13 +97,20 @@ GetVpcFlowLogsConfigRequest, ListVpcFlowLogsConfigsRequest, ListVpcFlowLogsConfigsResponse, + QueryOrgVpcFlowLogsConfigsRequest, + QueryOrgVpcFlowLogsConfigsResponse, + ShowEffectiveFlowLogsConfigsRequest, + ShowEffectiveFlowLogsConfigsResponse, UpdateVpcFlowLogsConfigRequest, ) from google.cloud.network_management_v1.types.vpc_flow_logs_config import ( + EffectiveVpcFlowLogsConfig, VpcFlowLogsConfig, ) __all__ = ( + "OrganizationVpcFlowLogsServiceClient", + "OrganizationVpcFlowLogsServiceAsyncClient", "ReachabilityServiceClient", "ReachabilityServiceAsyncClient", "VpcFlowLogsServiceClient", @@ -128,7 +143,9 @@ "ForwardingRuleInfo", "GKEMasterInfo", "GoogleServiceInfo", + "HybridSubnetInfo", "InstanceInfo", + "InterconnectAttachmentInfo", "LoadBalancerBackend", "LoadBalancerBackendInfo", "LoadBalancerInfo", @@ -152,6 +169,11 @@ "GetVpcFlowLogsConfigRequest", "ListVpcFlowLogsConfigsRequest", "ListVpcFlowLogsConfigsResponse", + "QueryOrgVpcFlowLogsConfigsRequest", + "QueryOrgVpcFlowLogsConfigsResponse", + "ShowEffectiveFlowLogsConfigsRequest", + "ShowEffectiveFlowLogsConfigsResponse", "UpdateVpcFlowLogsConfigRequest", + "EffectiveVpcFlowLogsConfig", "VpcFlowLogsConfig", ) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/__init__.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/__init__.py index 8927caf475c3..fdf67817917e 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/__init__.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/__init__.py @@ -18,6 +18,10 @@ __version__ = package_version.__version__ +from .services.organization_vpc_flow_logs_service import ( + 
OrganizationVpcFlowLogsServiceAsyncClient, + OrganizationVpcFlowLogsServiceClient, +) from .services.reachability_service import ( ReachabilityServiceAsyncClient, ReachabilityServiceClient, @@ -59,7 +63,9 @@ ForwardingRuleInfo, GKEMasterInfo, GoogleServiceInfo, + HybridSubnetInfo, InstanceInfo, + InterconnectAttachmentInfo, LoadBalancerBackend, LoadBalancerBackendInfo, LoadBalancerInfo, @@ -85,11 +91,16 @@ GetVpcFlowLogsConfigRequest, ListVpcFlowLogsConfigsRequest, ListVpcFlowLogsConfigsResponse, + QueryOrgVpcFlowLogsConfigsRequest, + QueryOrgVpcFlowLogsConfigsResponse, + ShowEffectiveFlowLogsConfigsRequest, + ShowEffectiveFlowLogsConfigsResponse, UpdateVpcFlowLogsConfigRequest, ) -from .types.vpc_flow_logs_config import VpcFlowLogsConfig +from .types.vpc_flow_logs_config import EffectiveVpcFlowLogsConfig, VpcFlowLogsConfig __all__ = ( + "OrganizationVpcFlowLogsServiceAsyncClient", "ReachabilityServiceAsyncClient", "VpcFlowLogsServiceAsyncClient", "AbortInfo", @@ -105,6 +116,7 @@ "DeliverInfo", "DirectVpcEgressConnectionInfo", "DropInfo", + "EffectiveVpcFlowLogsConfig", "Endpoint", "EndpointInfo", "FirewallInfo", @@ -114,7 +126,9 @@ "GetConnectivityTestRequest", "GetVpcFlowLogsConfigRequest", "GoogleServiceInfo", + "HybridSubnetInfo", "InstanceInfo", + "InterconnectAttachmentInfo", "LatencyDistribution", "LatencyPercentile", "ListConnectivityTestsRequest", @@ -128,8 +142,11 @@ "NatInfo", "NetworkInfo", "OperationMetadata", + "OrganizationVpcFlowLogsServiceClient", "ProbingDetails", "ProxyConnectionInfo", + "QueryOrgVpcFlowLogsConfigsRequest", + "QueryOrgVpcFlowLogsConfigsResponse", "ReachabilityDetails", "ReachabilityServiceClient", "RedisClusterInfo", @@ -138,6 +155,8 @@ "RouteInfo", "ServerlessExternalConnectionInfo", "ServerlessNegInfo", + "ShowEffectiveFlowLogsConfigsRequest", + "ShowEffectiveFlowLogsConfigsResponse", "Step", "StorageBucketInfo", "Trace", diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_metadata.json b/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_metadata.json index 4f020eb70b02..723c9d95854d 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_metadata.json +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/gapic_metadata.json @@ -5,6 +5,100 @@ "protoPackage": "google.cloud.networkmanagement.v1", "schema": "1.0", "services": { + "OrganizationVpcFlowLogsService": { + "clients": { + "grpc": { + "libraryClient": "OrganizationVpcFlowLogsServiceClient", + "rpcs": { + "CreateVpcFlowLogsConfig": { + "methods": [ + "create_vpc_flow_logs_config" + ] + }, + "DeleteVpcFlowLogsConfig": { + "methods": [ + "delete_vpc_flow_logs_config" + ] + }, + "GetVpcFlowLogsConfig": { + "methods": [ + "get_vpc_flow_logs_config" + ] + }, + "ListVpcFlowLogsConfigs": { + "methods": [ + "list_vpc_flow_logs_configs" + ] + }, + "UpdateVpcFlowLogsConfig": { + "methods": [ + "update_vpc_flow_logs_config" + ] + } + } + }, + "grpc-async": { + "libraryClient": "OrganizationVpcFlowLogsServiceAsyncClient", + "rpcs": { + "CreateVpcFlowLogsConfig": { + "methods": [ + "create_vpc_flow_logs_config" + ] + }, + "DeleteVpcFlowLogsConfig": { + "methods": [ + "delete_vpc_flow_logs_config" + ] + }, + "GetVpcFlowLogsConfig": { + "methods": [ + "get_vpc_flow_logs_config" + ] + }, + "ListVpcFlowLogsConfigs": { + "methods": [ + "list_vpc_flow_logs_configs" + ] + }, + "UpdateVpcFlowLogsConfig": { + "methods": [ + "update_vpc_flow_logs_config" + ] + } + } + }, + "rest": 
{ + "libraryClient": "OrganizationVpcFlowLogsServiceClient", + "rpcs": { + "CreateVpcFlowLogsConfig": { + "methods": [ + "create_vpc_flow_logs_config" + ] + }, + "DeleteVpcFlowLogsConfig": { + "methods": [ + "delete_vpc_flow_logs_config" + ] + }, + "GetVpcFlowLogsConfig": { + "methods": [ + "get_vpc_flow_logs_config" + ] + }, + "ListVpcFlowLogsConfigs": { + "methods": [ + "list_vpc_flow_logs_configs" + ] + }, + "UpdateVpcFlowLogsConfig": { + "methods": [ + "update_vpc_flow_logs_config" + ] + } + } + } + } + }, "ReachabilityService": { "clients": { "grpc": { @@ -139,6 +233,16 @@ "list_vpc_flow_logs_configs" ] }, + "QueryOrgVpcFlowLogsConfigs": { + "methods": [ + "query_org_vpc_flow_logs_configs" + ] + }, + "ShowEffectiveFlowLogsConfigs": { + "methods": [ + "show_effective_flow_logs_configs" + ] + }, "UpdateVpcFlowLogsConfig": { "methods": [ "update_vpc_flow_logs_config" @@ -169,6 +273,16 @@ "list_vpc_flow_logs_configs" ] }, + "QueryOrgVpcFlowLogsConfigs": { + "methods": [ + "query_org_vpc_flow_logs_configs" + ] + }, + "ShowEffectiveFlowLogsConfigs": { + "methods": [ + "show_effective_flow_logs_configs" + ] + }, "UpdateVpcFlowLogsConfig": { "methods": [ "update_vpc_flow_logs_config" @@ -199,6 +313,16 @@ "list_vpc_flow_logs_configs" ] }, + "QueryOrgVpcFlowLogsConfigs": { + "methods": [ + "query_org_vpc_flow_logs_configs" + ] + }, + "ShowEffectiveFlowLogsConfigs": { + "methods": [ + "show_effective_flow_logs_configs" + ] + }, "UpdateVpcFlowLogsConfig": { "methods": [ "update_vpc_flow_logs_config" diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/__init__.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/__init__.py new file mode 100644 index 000000000000..60cbe134687d --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import OrganizationVpcFlowLogsServiceAsyncClient +from .client import OrganizationVpcFlowLogsServiceClient + +__all__ = ( + "OrganizationVpcFlowLogsServiceClient", + "OrganizationVpcFlowLogsServiceAsyncClient", +) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/async_client.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/async_client.py new file mode 100644 index 000000000000..a1762149c616 --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/async_client.py @@ -0,0 +1,1702 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_management_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_management_v1.services.organization_vpc_flow_logs_service import ( + pagers, +) +from google.cloud.network_management_v1.types import reachability, vpc_flow_logs +from google.cloud.network_management_v1.types import ( + vpc_flow_logs_config as gcn_vpc_flow_logs_config, +) +from google.cloud.network_management_v1.types import vpc_flow_logs_config + +from .client import OrganizationVpcFlowLogsServiceClient +from .transports.base import ( + DEFAULT_CLIENT_INFO, + OrganizationVpcFlowLogsServiceTransport, +) +from .transports.grpc_asyncio import OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + 
CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class OrganizationVpcFlowLogsServiceAsyncClient: + """The VPC Flow Logs organization service in the Google Cloud + Network Management API provides organization level + configurations that generate Flow Logs. The service and the + configuration resources created using this service are global. + """ + + _client: OrganizationVpcFlowLogsServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = OrganizationVpcFlowLogsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = OrganizationVpcFlowLogsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + OrganizationVpcFlowLogsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = OrganizationVpcFlowLogsServiceClient._DEFAULT_UNIVERSE + + vpc_flow_logs_config_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.vpc_flow_logs_config_path + ) + parse_vpc_flow_logs_config_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.parse_vpc_flow_logs_config_path + ) + common_billing_account_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.common_folder_path + ) + parse_common_folder_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + OrganizationVpcFlowLogsServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OrganizationVpcFlowLogsServiceAsyncClient: The constructed client. + """ + return OrganizationVpcFlowLogsServiceClient.from_service_account_info.__func__(OrganizationVpcFlowLogsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OrganizationVpcFlowLogsServiceAsyncClient: The constructed client. 
+ """ + return OrganizationVpcFlowLogsServiceClient.from_service_account_file.__func__(OrganizationVpcFlowLogsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return OrganizationVpcFlowLogsServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> OrganizationVpcFlowLogsServiceTransport: + """Returns the transport used by the client instance. + + Returns: + OrganizationVpcFlowLogsServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = OrganizationVpcFlowLogsServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + OrganizationVpcFlowLogsServiceTransport, + Callable[..., OrganizationVpcFlowLogsServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the organization vpc flow logs service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,OrganizationVpcFlowLogsServiceTransport,Callable[..., OrganizationVpcFlowLogsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OrganizationVpcFlowLogsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = OrganizationVpcFlowLogsServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "credentialsType": None, + }, + ) + + async def list_vpc_flow_logs_configs( + self, + request: Optional[ + Union[vpc_flow_logs.ListVpcFlowLogsConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListVpcFlowLogsConfigsAsyncPager: + r"""Lists all ``VpcFlowLogsConfigs`` in a given organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + async def sample_list_vpc_flow_logs_configs(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.ListVpcFlowLogsConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vpc_flow_logs_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest, dict]]): + The request object. Request for the ``ListVpcFlowLogsConfigs`` method. + parent (:class:`str`): + Required. The parent resource of the VpcFlowLogsConfig, + in one of the following formats: + + - For project-level resourcs: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsAsyncPager: + Response for the ListVpcFlowLogsConfigs method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.ListVpcFlowLogsConfigsRequest): + request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_vpc_flow_logs_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListVpcFlowLogsConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.GetVpcFlowLogsConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> vpc_flow_logs_config.VpcFlowLogsConfig: + r"""Gets the details of a specific ``VpcFlowLogsConfig``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + async def sample_get_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.GetVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_vpc_flow_logs_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest, dict]]): + The request object. Request for the ``GetVpcFlowLogsConfig`` method. + name (:class:`str`): + Required. The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_management_v1.types.VpcFlowLogsConfig: + A configuration to generate VPC Flow + Logs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.GetVpcFlowLogsConfigRequest): + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_vpc_flow_logs_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
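+        # A minimal usage sketch, assuming a client built as in the sample above
+        # (the project and config IDs are placeholders):
+        #
+        #     config = await client.get_vpc_flow_logs_config(
+        #         name="projects/my-project/locations/global/vpcFlowLogsConfigs/my-config",
+        #     )
+        #
+        # Supplying both `request` and the flattened `name` argument raises the
+        # ValueError from the quick check above.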
+ return response + + async def create_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.CreateVpcFlowLogsConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + vpc_flow_logs_config: Optional[ + gcn_vpc_flow_logs_config.VpcFlowLogsConfig + ] = None, + vpc_flow_logs_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new ``VpcFlowLogsConfig``. If a configuration with the + exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + async def sample_create_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.CreateVpcFlowLogsConfigRequest( + parent="parent_value", + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.create_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest, dict]]): + The request object. Request for the ``CreateVpcFlowLogsConfig`` method. + parent (:class:`str`): + Required. The parent resource of the VpcFlowLogsConfig + to create, in one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpc_flow_logs_config (:class:`google.cloud.network_management_v1.types.VpcFlowLogsConfig`): + Required. A ``VpcFlowLogsConfig`` resource + This corresponds to the ``vpc_flow_logs_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpc_flow_logs_config_id (:class:`str`): + Required. ID of the ``VpcFlowLogsConfig``. + This corresponds to the ``vpc_flow_logs_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_management_v1.types.VpcFlowLogsConfig` + A configuration to generate VPC Flow Logs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, vpc_flow_logs_config, vpc_flow_logs_config_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.CreateVpcFlowLogsConfigRequest): + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if vpc_flow_logs_config is not None: + request.vpc_flow_logs_config = vpc_flow_logs_config + if vpc_flow_logs_config_id is not None: + request.vpc_flow_logs_config_id = vpc_flow_logs_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_vpc_flow_logs_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_vpc_flow_logs_config.VpcFlowLogsConfig, + metadata_type=reachability.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, dict] + ] = None, + *, + vpc_flow_logs_config: Optional[ + gcn_vpc_flow_logs_config.VpcFlowLogsConfig + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an existing ``VpcFlowLogsConfig``. If a configuration + with the exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. Updating a configuration with ``state=DISABLED`` will fail + 2. 
The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + async def sample_update_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.UpdateVpcFlowLogsConfigRequest( + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.update_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_management_v1.types.UpdateVpcFlowLogsConfigRequest, dict]]): + The request object. Request for the ``UpdateVpcFlowLogsConfig`` method. + vpc_flow_logs_config (:class:`google.cloud.network_management_v1.types.VpcFlowLogsConfig`): + Required. Only fields specified in update_mask are + updated. + + This corresponds to the ``vpc_flow_logs_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. At least one path + must be supplied in this field. For example, to change + the state of the configuration to ENABLED, specify + ``update_mask`` = ``"state"``, and the + ``vpc_flow_logs_config`` would be: + ``vpc_flow_logs_config = { name = "projects/my-project/locations/global/vpcFlowLogsConfigs/my-config" state = "ENABLED" }`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_management_v1.types.VpcFlowLogsConfig` + A configuration to generate VPC Flow Logs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
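+        #   As a sketch of the flattened form described in the docstring above, a
+        #   caller updating only the state would typically pass
+        #   update_mask=field_mask_pb2.FieldMask(paths=["state"]) together with a
+        #   VpcFlowLogsConfig whose `name` and `state` are set.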
+ flattened_params = [vpc_flow_logs_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.UpdateVpcFlowLogsConfigRequest): + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vpc_flow_logs_config is not None: + request.vpc_flow_logs_config = vpc_flow_logs_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_vpc_flow_logs_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("vpc_flow_logs_config.name", request.vpc_flow_logs_config.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gcn_vpc_flow_logs_config.VpcFlowLogsConfig, + metadata_type=reachability.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a specific ``VpcFlowLogsConfig``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + async def sample_delete_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.DeleteVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest, dict]]): + The request object. Request for the ``DeleteVpcFlowLogsConfig`` method. + name (:class:`str`): + Required. 
The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For a project-level resource: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For an organization-level resource: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.DeleteVpcFlowLogsConfigRequest): + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_vpc_flow_logs_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=reachability.OperationMetadata, + ) + + # Done; return the response. 
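+        # The returned AsyncOperation resolves to google.protobuf.empty_pb2.Empty;
+        # callers typically wait on it (for example via ``await response.result()``)
+        # to confirm that the deletion completed.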
+ return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
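+        # ``to_grpc_metadata`` converts the ("name", request.name) pair into the
+        # ``x-goog-request-params`` header used for request routing.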
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. 
+ """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "OrganizationVpcFlowLogsServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("OrganizationVpcFlowLogsServiceAsyncClient",) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/client.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/client.py new file mode 100644 index 000000000000..12de466c09c0 --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/client.py @@ -0,0 +1,2153 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
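+#
+# A minimal usage sketch for the synchronous client defined below (the
+# organization ID, network, and config ID are placeholders):
+#
+#     from google.cloud import network_management_v1
+#
+#     client = network_management_v1.OrganizationVpcFlowLogsServiceClient()
+#     config = network_management_v1.VpcFlowLogsConfig(network="my-network")
+#     operation = client.create_vpc_flow_logs_config(
+#         parent="organizations/123456/locations/global",
+#         vpc_flow_logs_config=config,
+#         vpc_flow_logs_config_id="my-config",
+#     )
+#     result = operation.result()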
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_management_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_management_v1.services.organization_vpc_flow_logs_service import ( + pagers, +) +from google.cloud.network_management_v1.types import reachability, vpc_flow_logs +from google.cloud.network_management_v1.types import ( + vpc_flow_logs_config as gcn_vpc_flow_logs_config, +) +from google.cloud.network_management_v1.types import vpc_flow_logs_config + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + OrganizationVpcFlowLogsServiceTransport, +) +from .transports.grpc import OrganizationVpcFlowLogsServiceGrpcTransport +from .transports.grpc_asyncio import OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport +from .transports.rest import OrganizationVpcFlowLogsServiceRestTransport + + +class OrganizationVpcFlowLogsServiceClientMeta(type): + """Metaclass for the OrganizationVpcFlowLogsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[OrganizationVpcFlowLogsServiceTransport]] + _transport_registry["grpc"] = OrganizationVpcFlowLogsServiceGrpcTransport + _transport_registry[ + "grpc_asyncio" + ] = OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = OrganizationVpcFlowLogsServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OrganizationVpcFlowLogsServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class OrganizationVpcFlowLogsServiceClient(
+    metaclass=OrganizationVpcFlowLogsServiceClientMeta
+):
+    """The VPC Flow Logs organization service in the Google Cloud
+    Network Management API provides organization level
+    configurations that generate Flow Logs. The service and the
+    configuration resources created using this service are global.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "networkmanagement.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "networkmanagement.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OrganizationVpcFlowLogsServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            OrganizationVpcFlowLogsServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> OrganizationVpcFlowLogsServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            OrganizationVpcFlowLogsServiceTransport: The transport used by the client
+                instance.
+ """ + return self._transport + + @staticmethod + def vpc_flow_logs_config_path( + project: str, + location: str, + vpc_flow_logs_config: str, + ) -> str: + """Returns a fully-qualified vpc_flow_logs_config string.""" + return "projects/{project}/locations/{location}/vpcFlowLogsConfigs/{vpc_flow_logs_config}".format( + project=project, + location=location, + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + @staticmethod + def parse_vpc_flow_logs_config_path(path: str) -> Dict[str, str]: + """Parses a vpc_flow_logs_config path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/vpcFlowLogsConfigs/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = OrganizationVpcFlowLogsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = OrganizationVpcFlowLogsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + OrganizationVpcFlowLogsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = OrganizationVpcFlowLogsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + OrganizationVpcFlowLogsServiceTransport, + Callable[..., OrganizationVpcFlowLogsServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the organization vpc flow logs service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,OrganizationVpcFlowLogsServiceTransport,Callable[..., OrganizationVpcFlowLogsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the OrganizationVpcFlowLogsServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which can have one of the following values:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide a client certificate for mTLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that the ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client_options = client_options
+ if isinstance(self._client_options, dict):
+ self._client_options = client_options_lib.from_dict(self._client_options)
+ if self._client_options is None:
+ self._client_options = client_options_lib.ClientOptions()
+ self._client_options = cast(
+ client_options_lib.ClientOptions, self._client_options
+ )
+
+ universe_domain_opt = getattr(self._client_options, "universe_domain", None)
+
+ (
+ self._use_client_cert,
+ self._use_mtls_endpoint,
+ self._universe_domain_env,
+ ) = OrganizationVpcFlowLogsServiceClient._read_environment_variables()
+ self._client_cert_source = (
+ OrganizationVpcFlowLogsServiceClient._get_client_cert_source(
+ self._client_options.client_cert_source, self._use_client_cert
+ )
+ )
+ self._universe_domain = (
+ OrganizationVpcFlowLogsServiceClient._get_universe_domain(
+ universe_domain_opt, self._universe_domain_env
+ )
+ )
+ self._api_endpoint = None  # updated below, depending on `transport`
+
+ # Initialize the universe domain validation.
+ self._is_universe_domain_valid = False
+
+ if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
+
+ api_key_value = getattr(self._client_options, "api_key", None)
+ if api_key_value and credentials:
+ raise ValueError(
+ "client_options.api_key and credentials are mutually exclusive"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ transport_provided = isinstance(
+ transport, OrganizationVpcFlowLogsServiceTransport
+ )
+ if transport_provided:
+ # transport is an OrganizationVpcFlowLogsServiceTransport instance.
+ if credentials or self._client_options.credentials_file or api_key_value:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if self._client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ ) + self._transport = cast(OrganizationVpcFlowLogsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[OrganizationVpcFlowLogsServiceTransport], + Callable[..., OrganizationVpcFlowLogsServiceTransport], + ] = ( + OrganizationVpcFlowLogsServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., OrganizationVpcFlowLogsServiceTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient`.", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "credentialsType": None, + }, + ) + + def list_vpc_flow_logs_configs( + self, + request: Optional[ + Union[vpc_flow_logs.ListVpcFlowLogsConfigsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListVpcFlowLogsConfigsPager: + r"""Lists all ``VpcFlowLogsConfigs`` in a given organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import network_management_v1
+
+ def sample_list_vpc_flow_logs_configs():
+ # Create a client
+ client = network_management_v1.OrganizationVpcFlowLogsServiceClient()
+
+ # Initialize request argument(s)
+ request = network_management_v1.ListVpcFlowLogsConfigsRequest(
+ parent="parent_value",
+ )
+
+ # Make the request
+ page_result = client.list_vpc_flow_logs_configs(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest, dict]):
+ The request object. Request for the ``ListVpcFlowLogsConfigs`` method.
+ parent (str):
+ Required. The parent resource of the VpcFlowLogsConfig,
+ in one of the following formats:
+
+ - For project-level resources:
+ ``projects/{project_id}/locations/global``
+
+ - For organization-level resources:
+ ``organizations/{organization_id}/locations/global``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsPager:
+ Response for the ListVpcFlowLogsConfigs method.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, vpc_flow_logs.ListVpcFlowLogsConfigsRequest):
+ request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[
+ self._transport.list_vpc_flow_logs_configs
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
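+ # NOTE (editorial, illustrative only; not part of the generated client): the
+ # pager constructed below fetches further pages lazily, so a caller can simply
+ # iterate over the return value, e.g.
+ #   for config in client.list_vpc_flow_logs_configs(parent="organizations/123/locations/global"):
+ #       print(config.name)
+ # The organization ID "123" above is a hypothetical placeholder.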
+ response = pagers.ListVpcFlowLogsConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.GetVpcFlowLogsConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> vpc_flow_logs_config.VpcFlowLogsConfig: + r"""Gets the details of a specific ``VpcFlowLogsConfig``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + def sample_get_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.GetVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_vpc_flow_logs_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest, dict]): + The request object. Request for the ``GetVpcFlowLogsConfig`` method. + name (str): + Required. The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_management_v1.types.VpcFlowLogsConfig: + A configuration to generate VPC Flow + Logs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
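+ # NOTE (editorial, illustrative only; not part of the generated client): the
+ # coercion below means a caller may pass a typed GetVpcFlowLogsConfigRequest, a
+ # plain dict with the same fields, or just the flattened ``name`` argument, e.g.
+ #   client.get_vpc_flow_logs_config(
+ #       name="organizations/123/locations/global/vpcFlowLogsConfigs/my-config"
+ #   )
+ # where the organization ID and config ID are hypothetical placeholders.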
+ if not isinstance(request, vpc_flow_logs.GetVpcFlowLogsConfigRequest): + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_vpc_flow_logs_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.CreateVpcFlowLogsConfigRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + vpc_flow_logs_config: Optional[ + gcn_vpc_flow_logs_config.VpcFlowLogsConfig + ] = None, + vpc_flow_logs_config_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new ``VpcFlowLogsConfig``. If a configuration with the + exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + def sample_create_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.CreateVpcFlowLogsConfigRequest( + parent="parent_value", + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.create_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest, dict]): + The request object. Request for the ``CreateVpcFlowLogsConfig`` method. + parent (str): + Required. 
The parent resource of the VpcFlowLogsConfig + to create, in one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpc_flow_logs_config (google.cloud.network_management_v1.types.VpcFlowLogsConfig): + Required. A ``VpcFlowLogsConfig`` resource + This corresponds to the ``vpc_flow_logs_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vpc_flow_logs_config_id (str): + Required. ID of the ``VpcFlowLogsConfig``. + This corresponds to the ``vpc_flow_logs_config_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_management_v1.types.VpcFlowLogsConfig` + A configuration to generate VPC Flow Logs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, vpc_flow_logs_config, vpc_flow_logs_config_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.CreateVpcFlowLogsConfigRequest): + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if vpc_flow_logs_config is not None: + request.vpc_flow_logs_config = vpc_flow_logs_config + if vpc_flow_logs_config_id is not None: + request.vpc_flow_logs_config_id = vpc_flow_logs_config_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_vpc_flow_logs_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
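+ # NOTE (editorial, illustrative only; not part of the generated client):
+ # from_gapic wraps the raw long-running operation so a caller can block on
+ # completion and receive the typed result, e.g.
+ #   config = client.create_vpc_flow_logs_config(request=request).result()
+ # which waits for the create to finish and returns the VpcFlowLogsConfig.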
+ response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_vpc_flow_logs_config.VpcFlowLogsConfig, + metadata_type=reachability.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, dict] + ] = None, + *, + vpc_flow_logs_config: Optional[ + gcn_vpc_flow_logs_config.VpcFlowLogsConfig + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an existing ``VpcFlowLogsConfig``. If a configuration + with the exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. Updating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + def sample_update_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.UpdateVpcFlowLogsConfigRequest( + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.update_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_management_v1.types.UpdateVpcFlowLogsConfigRequest, dict]): + The request object. Request for the ``UpdateVpcFlowLogsConfig`` method. + vpc_flow_logs_config (google.cloud.network_management_v1.types.VpcFlowLogsConfig): + Required. Only fields specified in update_mask are + updated. + + This corresponds to the ``vpc_flow_logs_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. At least one path + must be supplied in this field. For example, to change + the state of the configuration to ENABLED, specify + ``update_mask`` = ``"state"``, and the + ``vpc_flow_logs_config`` would be: + ``vpc_flow_logs_config = { name = "projects/my-project/locations/global/vpcFlowLogsConfigs/my-config" state = "ENABLED" }`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.network_management_v1.types.VpcFlowLogsConfig` + A configuration to generate VPC Flow Logs. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [vpc_flow_logs_config, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.UpdateVpcFlowLogsConfigRequest): + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if vpc_flow_logs_config is not None: + request.vpc_flow_logs_config = vpc_flow_logs_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_vpc_flow_logs_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("vpc_flow_logs_config.name", request.vpc_flow_logs_config.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gcn_vpc_flow_logs_config.VpcFlowLogsConfig, + metadata_type=reachability.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_vpc_flow_logs_config( + self, + request: Optional[ + Union[vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a specific ``VpcFlowLogsConfig``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + def sample_delete_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.DeleteVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest, dict]): + The request object. Request for the ``DeleteVpcFlowLogsConfig`` method. + name (str): + Required. The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For a project-level resource: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For an organization-level resource: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.DeleteVpcFlowLogsConfigRequest): + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_vpc_flow_logs_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
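+ # NOTE (editorial, illustrative only; not part of the generated client): the
+ # routing header tells the backend which resource the call targets; for example,
+ # a request whose ``name`` is
+ # "organizations/123/locations/global/vpcFlowLogsConfigs/my-config" (hypothetical
+ # IDs) is sent with an "x-goog-request-params" metadata entry carrying that name.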
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=reachability.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "OrganizationVpcFlowLogsServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
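+ # NOTE (editorial, illustrative only; not part of the generated client):
+ # delete_operation returns None; it only tells the API that the client no
+ # longer needs the operation's result and does not cancel a running operation,
+ # e.g.
+ #   client.delete_operation({"name": "operations/sample-operation-id"})
+ # where the operation name is a hypothetical placeholder.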
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. 
A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. 
+ + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("OrganizationVpcFlowLogsServiceClient",) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/pagers.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/pagers.py new file mode 100644 index 000000000000..6502a505d0ce --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/pagers.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.network_management_v1.types import vpc_flow_logs, vpc_flow_logs_config + + +class ListVpcFlowLogsConfigsPager: + """A pager for iterating through ``list_vpc_flow_logs_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``vpc_flow_logs_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListVpcFlowLogsConfigs`` requests and continue to iterate + through the ``vpc_flow_logs_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vpc_flow_logs.ListVpcFlowLogsConfigsResponse], + request: vpc_flow_logs.ListVpcFlowLogsConfigsRequest, + response: vpc_flow_logs.ListVpcFlowLogsConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest): + The initial request object. + response (google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vpc_flow_logs.ListVpcFlowLogsConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[vpc_flow_logs_config.VpcFlowLogsConfig]: + for page in self.pages: + yield from page.vpc_flow_logs_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListVpcFlowLogsConfigsAsyncPager: + """A pager for iterating through ``list_vpc_flow_logs_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``vpc_flow_logs_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListVpcFlowLogsConfigs`` requests and continue to iterate + through the ``vpc_flow_logs_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[vpc_flow_logs.ListVpcFlowLogsConfigsResponse]], + request: vpc_flow_logs.ListVpcFlowLogsConfigsRequest, + response: vpc_flow_logs.ListVpcFlowLogsConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest): + The initial request object. + response (google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vpc_flow_logs.ListVpcFlowLogsConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[vpc_flow_logs_config.VpcFlowLogsConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.vpc_flow_logs_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/README.rst b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/README.rst new file mode 100644 index 000000000000..49b506137650 --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`OrganizationVpcFlowLogsServiceTransport` is the ABC for all transports. +- public child `OrganizationVpcFlowLogsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseOrganizationVpcFlowLogsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `OrganizationVpcFlowLogsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/__init__.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/__init__.py new file mode 100644 index 000000000000..e0d64c882e70 --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import OrganizationVpcFlowLogsServiceTransport +from .grpc import OrganizationVpcFlowLogsServiceGrpcTransport +from .grpc_asyncio import OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport +from .rest import ( + OrganizationVpcFlowLogsServiceRestInterceptor, + OrganizationVpcFlowLogsServiceRestTransport, +) + +# Compile a registry of transports. +_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[OrganizationVpcFlowLogsServiceTransport]] +_transport_registry["grpc"] = OrganizationVpcFlowLogsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = OrganizationVpcFlowLogsServiceRestTransport + +__all__ = ( + "OrganizationVpcFlowLogsServiceTransport", + "OrganizationVpcFlowLogsServiceGrpcTransport", + "OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport", + "OrganizationVpcFlowLogsServiceRestTransport", + "OrganizationVpcFlowLogsServiceRestInterceptor", +) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/base.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/base.py new file mode 100644 index 000000000000..5310ad7de9e4 --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/base.py @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
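# Illustrative sketch (editor's addition, not part of the generated diff).
# The registry above maps the strings "grpc", "grpc_asyncio", and "rest" to the
# corresponding transport classes. GAPIC clients conventionally resolve the
# ``transport`` constructor argument against this registry, so a transport can
# be selected by name; the client import path here is an assumption.
from google.cloud import network_management_v1

# The default transport is gRPC; request the synchronous REST transport instead.
rest_client = network_management_v1.OrganizationVpcFlowLogsServiceClient(
    transport="rest"
)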
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.network_management_v1 import gapic_version as package_version +from google.cloud.network_management_v1.types import vpc_flow_logs, vpc_flow_logs_config + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class OrganizationVpcFlowLogsServiceTransport(abc.ABC): + """Abstract transport class for OrganizationVpcFlowLogsService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "networkmanagement.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networkmanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_vpc_flow_logs_configs: gapic_v1.method.wrap_method( + self.list_vpc_flow_logs_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_vpc_flow_logs_config: gapic_v1.method.wrap_method( + self.get_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.create_vpc_flow_logs_config: gapic_v1.method.wrap_method( + self.create_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.update_vpc_flow_logs_config: gapic_v1.method.wrap_method( + self.update_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_vpc_flow_logs_config: gapic_v1.method.wrap_method( + self.delete_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ListVpcFlowLogsConfigsRequest], + Union[ + vpc_flow_logs.ListVpcFlowLogsConfigsResponse, + Awaitable[vpc_flow_logs.ListVpcFlowLogsConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.GetVpcFlowLogsConfigRequest], + Union[ + vpc_flow_logs_config.VpcFlowLogsConfig, + Awaitable[vpc_flow_logs_config.VpcFlowLogsConfig], + ], + ]: + raise NotImplementedError() + + @property + def create_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.CreateVpcFlowLogsConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.UpdateVpcFlowLogsConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.DeleteVpcFlowLogsConfigRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("OrganizationVpcFlowLogsServiceTransport",) diff --git 
a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/grpc.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/grpc.py new file mode 100644 index 000000000000..5af1c99eb49c --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/grpc.py @@ -0,0 +1,722 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.network_management_v1.types import vpc_flow_logs, vpc_flow_logs_config + +from .base import DEFAULT_CLIENT_INFO, OrganizationVpcFlowLogsServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if 
logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class OrganizationVpcFlowLogsServiceGrpcTransport( + OrganizationVpcFlowLogsServiceTransport +): + """gRPC backend transport for OrganizationVpcFlowLogsService. + + The VPC Flow Logs organization service in the Google Cloud + Network Management API provides organization level + configurations that generate Flow Logs. The service and the + configuration resources created using this service are global. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "networkmanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networkmanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "networkmanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ListVpcFlowLogsConfigsRequest], + vpc_flow_logs.ListVpcFlowLogsConfigsResponse, + ]: + r"""Return a callable for the list vpc flow logs configs method over gRPC. + + Lists all ``VpcFlowLogsConfigs`` in a given organization. + + Returns: + Callable[[~.ListVpcFlowLogsConfigsRequest], + ~.ListVpcFlowLogsConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_vpc_flow_logs_configs" not in self._stubs: + self._stubs[ + "list_vpc_flow_logs_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/ListVpcFlowLogsConfigs", + request_serializer=vpc_flow_logs.ListVpcFlowLogsConfigsRequest.serialize, + response_deserializer=vpc_flow_logs.ListVpcFlowLogsConfigsResponse.deserialize, + ) + return self._stubs["list_vpc_flow_logs_configs"] + + @property + def get_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.GetVpcFlowLogsConfigRequest], + vpc_flow_logs_config.VpcFlowLogsConfig, + ]: + r"""Return a callable for the get vpc flow logs config method over gRPC. + + Gets the details of a specific ``VpcFlowLogsConfig``. + + Returns: + Callable[[~.GetVpcFlowLogsConfigRequest], + ~.VpcFlowLogsConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_vpc_flow_logs_config" not in self._stubs: + self._stubs["get_vpc_flow_logs_config"] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/GetVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.GetVpcFlowLogsConfigRequest.serialize, + response_deserializer=vpc_flow_logs_config.VpcFlowLogsConfig.deserialize, + ) + return self._stubs["get_vpc_flow_logs_config"] + + @property + def create_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.CreateVpcFlowLogsConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create vpc flow logs config method over gRPC. + + Creates a new ``VpcFlowLogsConfig``. If a configuration with the + exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. 
Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + Returns: + Callable[[~.CreateVpcFlowLogsConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_vpc_flow_logs_config" not in self._stubs: + self._stubs[ + "create_vpc_flow_logs_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/CreateVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.CreateVpcFlowLogsConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_vpc_flow_logs_config"] + + @property + def update_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.UpdateVpcFlowLogsConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update vpc flow logs config method over gRPC. + + Updates an existing ``VpcFlowLogsConfig``. If a configuration + with the exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. Updating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + Returns: + Callable[[~.UpdateVpcFlowLogsConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_vpc_flow_logs_config" not in self._stubs: + self._stubs[ + "update_vpc_flow_logs_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/UpdateVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_vpc_flow_logs_config"] + + @property + def delete_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.DeleteVpcFlowLogsConfigRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete vpc flow logs config method over gRPC. + + Deletes a specific ``VpcFlowLogsConfig``. + + Returns: + Callable[[~.DeleteVpcFlowLogsConfigRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_vpc_flow_logs_config" not in self._stubs: + self._stubs[ + "delete_vpc_flow_logs_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/DeleteVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_vpc_flow_logs_config"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("OrganizationVpcFlowLogsServiceGrpcTransport",) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/grpc_asyncio.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..4c40ab2ca76c --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/grpc_asyncio.py @@ -0,0 +1,813 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.network_management_v1.types import vpc_flow_logs, vpc_flow_logs_config + +from .base import DEFAULT_CLIENT_INFO, OrganizationVpcFlowLogsServiceTransport +from .grpc import OrganizationVpcFlowLogsServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": str(client_call_details.method), + "response": 
grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport( + OrganizationVpcFlowLogsServiceTransport +): + """gRPC AsyncIO backend transport for OrganizationVpcFlowLogsService. + + The VPC Flow Logs organization service in the Google Cloud + Network Management API provides organization level + configurations that generate Flow Logs. The service and the + configuration resources created using this service are global. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "networkmanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "networkmanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networkmanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests.
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ListVpcFlowLogsConfigsRequest], + Awaitable[vpc_flow_logs.ListVpcFlowLogsConfigsResponse], + ]: + r"""Return a callable for the list vpc flow logs configs method over gRPC. + + Lists all ``VpcFlowLogsConfigs`` in a given organization. + + Returns: + Callable[[~.ListVpcFlowLogsConfigsRequest], + Awaitable[~.ListVpcFlowLogsConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_vpc_flow_logs_configs" not in self._stubs: + self._stubs[ + "list_vpc_flow_logs_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/ListVpcFlowLogsConfigs", + request_serializer=vpc_flow_logs.ListVpcFlowLogsConfigsRequest.serialize, + response_deserializer=vpc_flow_logs.ListVpcFlowLogsConfigsResponse.deserialize, + ) + return self._stubs["list_vpc_flow_logs_configs"] + + @property + def get_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.GetVpcFlowLogsConfigRequest], + Awaitable[vpc_flow_logs_config.VpcFlowLogsConfig], + ]: + r"""Return a callable for the get vpc flow logs config method over gRPC. + + Gets the details of a specific ``VpcFlowLogsConfig``. + + Returns: + Callable[[~.GetVpcFlowLogsConfigRequest], + Awaitable[~.VpcFlowLogsConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_vpc_flow_logs_config" not in self._stubs: + self._stubs["get_vpc_flow_logs_config"] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/GetVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.GetVpcFlowLogsConfigRequest.serialize, + response_deserializer=vpc_flow_logs_config.VpcFlowLogsConfig.deserialize, + ) + return self._stubs["get_vpc_flow_logs_config"] + + @property + def create_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.CreateVpcFlowLogsConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create vpc flow logs config method over gRPC. + + Creates a new ``VpcFlowLogsConfig``. If a configuration with the + exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + Returns: + Callable[[~.CreateVpcFlowLogsConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_vpc_flow_logs_config" not in self._stubs: + self._stubs[ + "create_vpc_flow_logs_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/CreateVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.CreateVpcFlowLogsConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_vpc_flow_logs_config"] + + @property + def update_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.UpdateVpcFlowLogsConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update vpc flow logs config method over gRPC. + + Updates an existing ``VpcFlowLogsConfig``. If a configuration + with the exact same settings already exists (even if the ID is + different), the creation fails. Notes: + + 1. 
Updating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: + + - name + - create_time + - update_time + - labels + - description + + Returns: + Callable[[~.UpdateVpcFlowLogsConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_vpc_flow_logs_config" not in self._stubs: + self._stubs[ + "update_vpc_flow_logs_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/UpdateVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_vpc_flow_logs_config"] + + @property + def delete_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.DeleteVpcFlowLogsConfigRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete vpc flow logs config method over gRPC. + + Deletes a specific ``VpcFlowLogsConfig``. + + Returns: + Callable[[~.DeleteVpcFlowLogsConfigRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_vpc_flow_logs_config" not in self._stubs: + self._stubs[ + "delete_vpc_flow_logs_config" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService/DeleteVpcFlowLogsConfig", + request_serializer=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_vpc_flow_logs_config"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_vpc_flow_logs_configs: self._wrap_method( + self.list_vpc_flow_logs_configs, + default_timeout=None, + client_info=client_info, + ), + self.get_vpc_flow_logs_config: self._wrap_method( + self.get_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.create_vpc_flow_logs_config: self._wrap_method( + self.create_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.update_vpc_flow_logs_config: self._wrap_method( + self.update_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.delete_vpc_flow_logs_config: self._wrap_method( + self.delete_vpc_flow_logs_config, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/rest.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/rest.py new file mode 100644 index 000000000000..5749da14192e --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/rest.py @@ -0,0 +1,2836 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.network_management_v1.types import vpc_flow_logs, vpc_flow_logs_config + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseOrganizationVpcFlowLogsServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class OrganizationVpcFlowLogsServiceRestInterceptor: + """Interceptor for 
OrganizationVpcFlowLogsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the OrganizationVpcFlowLogsServiceRestTransport. + + .. code-block:: python + class MyCustomOrganizationVpcFlowLogsServiceInterceptor(OrganizationVpcFlowLogsServiceRestInterceptor): + def pre_create_vpc_flow_logs_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_vpc_flow_logs_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_vpc_flow_logs_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_vpc_flow_logs_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_vpc_flow_logs_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_vpc_flow_logs_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_vpc_flow_logs_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_vpc_flow_logs_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_vpc_flow_logs_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_vpc_flow_logs_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = OrganizationVpcFlowLogsServiceRestTransport(interceptor=MyCustomOrganizationVpcFlowLogsServiceInterceptor()) + client = OrganizationVpcFlowLogsServiceClient(transport=transport) + + + """ + + def pre_create_vpc_flow_logs_config( + self, + request: vpc_flow_logs.CreateVpcFlowLogsConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.CreateVpcFlowLogsConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_vpc_flow_logs_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_create_vpc_flow_logs_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_vpc_flow_logs_config + + DEPRECATED. Please use the `post_create_vpc_flow_logs_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. This `post_create_vpc_flow_logs_config` interceptor runs + before the `post_create_vpc_flow_logs_config_with_metadata` interceptor. 
+ """ + return response + + def post_create_vpc_flow_logs_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_vpc_flow_logs_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationVpcFlowLogsService server but before it is returned to user code. + + We recommend only using this `post_create_vpc_flow_logs_config_with_metadata` + interceptor in new development instead of the `post_create_vpc_flow_logs_config` interceptor. + When both interceptors are used, this `post_create_vpc_flow_logs_config_with_metadata` interceptor runs after the + `post_create_vpc_flow_logs_config` interceptor. The (possibly modified) response returned by + `post_create_vpc_flow_logs_config` will be passed to + `post_create_vpc_flow_logs_config_with_metadata`. + """ + return response, metadata + + def pre_delete_vpc_flow_logs_config( + self, + request: vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_vpc_flow_logs_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_delete_vpc_flow_logs_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_vpc_flow_logs_config + + DEPRECATED. Please use the `post_delete_vpc_flow_logs_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. This `post_delete_vpc_flow_logs_config` interceptor runs + before the `post_delete_vpc_flow_logs_config_with_metadata` interceptor. + """ + return response + + def post_delete_vpc_flow_logs_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_vpc_flow_logs_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationVpcFlowLogsService server but before it is returned to user code. + + We recommend only using this `post_delete_vpc_flow_logs_config_with_metadata` + interceptor in new development instead of the `post_delete_vpc_flow_logs_config` interceptor. + When both interceptors are used, this `post_delete_vpc_flow_logs_config_with_metadata` interceptor runs after the + `post_delete_vpc_flow_logs_config` interceptor. The (possibly modified) response returned by + `post_delete_vpc_flow_logs_config` will be passed to + `post_delete_vpc_flow_logs_config_with_metadata`. 
+ """ + return response, metadata + + def pre_get_vpc_flow_logs_config( + self, + request: vpc_flow_logs.GetVpcFlowLogsConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.GetVpcFlowLogsConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_vpc_flow_logs_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_get_vpc_flow_logs_config( + self, response: vpc_flow_logs_config.VpcFlowLogsConfig + ) -> vpc_flow_logs_config.VpcFlowLogsConfig: + """Post-rpc interceptor for get_vpc_flow_logs_config + + DEPRECATED. Please use the `post_get_vpc_flow_logs_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. This `post_get_vpc_flow_logs_config` interceptor runs + before the `post_get_vpc_flow_logs_config_with_metadata` interceptor. + """ + return response + + def post_get_vpc_flow_logs_config_with_metadata( + self, + response: vpc_flow_logs_config.VpcFlowLogsConfig, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs_config.VpcFlowLogsConfig, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_vpc_flow_logs_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationVpcFlowLogsService server but before it is returned to user code. + + We recommend only using this `post_get_vpc_flow_logs_config_with_metadata` + interceptor in new development instead of the `post_get_vpc_flow_logs_config` interceptor. + When both interceptors are used, this `post_get_vpc_flow_logs_config_with_metadata` interceptor runs after the + `post_get_vpc_flow_logs_config` interceptor. The (possibly modified) response returned by + `post_get_vpc_flow_logs_config` will be passed to + `post_get_vpc_flow_logs_config_with_metadata`. + """ + return response, metadata + + def pre_list_vpc_flow_logs_configs( + self, + request: vpc_flow_logs.ListVpcFlowLogsConfigsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.ListVpcFlowLogsConfigsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_vpc_flow_logs_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_list_vpc_flow_logs_configs( + self, response: vpc_flow_logs.ListVpcFlowLogsConfigsResponse + ) -> vpc_flow_logs.ListVpcFlowLogsConfigsResponse: + """Post-rpc interceptor for list_vpc_flow_logs_configs + + DEPRECATED. Please use the `post_list_vpc_flow_logs_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. This `post_list_vpc_flow_logs_configs` interceptor runs + before the `post_list_vpc_flow_logs_configs_with_metadata` interceptor. 
+ """ + return response + + def post_list_vpc_flow_logs_configs_with_metadata( + self, + response: vpc_flow_logs.ListVpcFlowLogsConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.ListVpcFlowLogsConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_vpc_flow_logs_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationVpcFlowLogsService server but before it is returned to user code. + + We recommend only using this `post_list_vpc_flow_logs_configs_with_metadata` + interceptor in new development instead of the `post_list_vpc_flow_logs_configs` interceptor. + When both interceptors are used, this `post_list_vpc_flow_logs_configs_with_metadata` interceptor runs after the + `post_list_vpc_flow_logs_configs` interceptor. The (possibly modified) response returned by + `post_list_vpc_flow_logs_configs` will be passed to + `post_list_vpc_flow_logs_configs_with_metadata`. + """ + return response, metadata + + def pre_update_vpc_flow_logs_config( + self, + request: vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_vpc_flow_logs_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_update_vpc_flow_logs_config( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_vpc_flow_logs_config + + DEPRECATED. Please use the `post_update_vpc_flow_logs_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. This `post_update_vpc_flow_logs_config` interceptor runs + before the `post_update_vpc_flow_logs_config_with_metadata` interceptor. + """ + return response + + def post_update_vpc_flow_logs_config_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_vpc_flow_logs_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the OrganizationVpcFlowLogsService server but before it is returned to user code. + + We recommend only using this `post_update_vpc_flow_logs_config_with_metadata` + interceptor in new development instead of the `post_update_vpc_flow_logs_config` interceptor. + When both interceptors are used, this `post_update_vpc_flow_logs_config_with_metadata` interceptor runs after the + `post_update_vpc_flow_logs_config` interceptor. The (possibly modified) response returned by + `post_update_vpc_flow_logs_config` will be passed to + `post_update_vpc_flow_logs_config_with_metadata`. 
+ """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. 
+ """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the OrganizationVpcFlowLogsService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the OrganizationVpcFlowLogsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class OrganizationVpcFlowLogsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: OrganizationVpcFlowLogsServiceRestInterceptor + + +class OrganizationVpcFlowLogsServiceRestTransport( + _BaseOrganizationVpcFlowLogsServiceRestTransport +): + """REST backend synchronous transport for OrganizationVpcFlowLogsService. + + The VPC Flow Logs organization service in the Google Cloud + Network Management API provides organization level + configurations that generate Flow Logs. The service and the + configuration resources created using this service are global. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networkmanagement.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[OrganizationVpcFlowLogsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'networkmanagement.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = ( + interceptor or OrganizationVpcFlowLogsServiceRestInterceptor() + ) + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/global/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/global/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/global/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/global}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateVpcFlowLogsConfig( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCreateVpcFlowLogsConfig, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationVpcFlowLogsServiceRestTransport.CreateVpcFlowLogsConfig" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: vpc_flow_logs.CreateVpcFlowLogsConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create vpc flow logs + config method over HTTP. + + Args: + request (~.vpc_flow_logs.CreateVpcFlowLogsConfigRequest): + The request object. Request for the ``CreateVpcFlowLogsConfig`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCreateVpcFlowLogsConfig._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_vpc_flow_logs_config( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCreateVpcFlowLogsConfig._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCreateVpcFlowLogsConfig._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCreateVpcFlowLogsConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.CreateVpcFlowLogsConfig", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "CreateVpcFlowLogsConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._CreateVpcFlowLogsConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_vpc_flow_logs_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_vpc_flow_logs_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.create_vpc_flow_logs_config", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "CreateVpcFlowLogsConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteVpcFlowLogsConfig( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteVpcFlowLogsConfig, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationVpcFlowLogsServiceRestTransport.DeleteVpcFlowLogsConfig" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete vpc flow logs + config method over HTTP. + + Args: + request (~.vpc_flow_logs.DeleteVpcFlowLogsConfigRequest): + The request object. Request for the ``DeleteVpcFlowLogsConfig`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteVpcFlowLogsConfig._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_vpc_flow_logs_config( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteVpcFlowLogsConfig._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteVpcFlowLogsConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.DeleteVpcFlowLogsConfig", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "DeleteVpcFlowLogsConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._DeleteVpcFlowLogsConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_vpc_flow_logs_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_vpc_flow_logs_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.delete_vpc_flow_logs_config", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "DeleteVpcFlowLogsConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetVpcFlowLogsConfig( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetVpcFlowLogsConfig, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationVpcFlowLogsServiceRestTransport.GetVpcFlowLogsConfig" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: vpc_flow_logs.GetVpcFlowLogsConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> vpc_flow_logs_config.VpcFlowLogsConfig: + r"""Call the get vpc flow logs config method over HTTP. + + Args: + request (~.vpc_flow_logs.GetVpcFlowLogsConfigRequest): + The request object. Request for the ``GetVpcFlowLogsConfig`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.vpc_flow_logs_config.VpcFlowLogsConfig: + A configuration to generate VPC Flow + Logs. + + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetVpcFlowLogsConfig._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_vpc_flow_logs_config( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetVpcFlowLogsConfig._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetVpcFlowLogsConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.GetVpcFlowLogsConfig", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetVpcFlowLogsConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._GetVpcFlowLogsConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vpc_flow_logs_config.VpcFlowLogsConfig() + pb_resp = vpc_flow_logs_config.VpcFlowLogsConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_vpc_flow_logs_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_vpc_flow_logs_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = vpc_flow_logs_config.VpcFlowLogsConfig.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.get_vpc_flow_logs_config", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetVpcFlowLogsConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListVpcFlowLogsConfigs( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListVpcFlowLogsConfigs, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationVpcFlowLogsServiceRestTransport.ListVpcFlowLogsConfigs" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: vpc_flow_logs.ListVpcFlowLogsConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> vpc_flow_logs.ListVpcFlowLogsConfigsResponse: + r"""Call the list vpc flow logs + configs method over HTTP. + + Args: + request (~.vpc_flow_logs.ListVpcFlowLogsConfigsRequest): + The request object. Request for the ``ListVpcFlowLogsConfigs`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.vpc_flow_logs.ListVpcFlowLogsConfigsResponse: + Response for the ``ListVpcFlowLogsConfigs`` method. 
+ """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListVpcFlowLogsConfigs._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_vpc_flow_logs_configs( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListVpcFlowLogsConfigs._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListVpcFlowLogsConfigs._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.ListVpcFlowLogsConfigs", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "ListVpcFlowLogsConfigs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._ListVpcFlowLogsConfigs._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + pb_resp = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_vpc_flow_logs_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_vpc_flow_logs_configs_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.list_vpc_flow_logs_configs", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "ListVpcFlowLogsConfigs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateVpcFlowLogsConfig( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseUpdateVpcFlowLogsConfig, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationVpcFlowLogsServiceRestTransport.UpdateVpcFlowLogsConfig" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = 
getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update vpc flow logs + config method over HTTP. + + Args: + request (~.vpc_flow_logs.UpdateVpcFlowLogsConfigRequest): + The request object. Request for the ``UpdateVpcFlowLogsConfig`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseUpdateVpcFlowLogsConfig._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_vpc_flow_logs_config( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseUpdateVpcFlowLogsConfig._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseUpdateVpcFlowLogsConfig._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseUpdateVpcFlowLogsConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.UpdateVpcFlowLogsConfig", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "UpdateVpcFlowLogsConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._UpdateVpcFlowLogsConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_vpc_flow_logs_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_vpc_flow_logs_config_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.update_vpc_flow_logs_config", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "UpdateVpcFlowLogsConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.CreateVpcFlowLogsConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateVpcFlowLogsConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.DeleteVpcFlowLogsConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteVpcFlowLogsConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.GetVpcFlowLogsConfigRequest], + vpc_flow_logs_config.VpcFlowLogsConfig, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetVpcFlowLogsConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ListVpcFlowLogsConfigsRequest], + vpc_flow_logs.ListVpcFlowLogsConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListVpcFlowLogsConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_vpc_flow_logs_config( + self, + ) -> Callable[ + [vpc_flow_logs.UpdateVpcFlowLogsConfigRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateVpcFlowLogsConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetLocation, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + OrganizationVpcFlowLogsServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListLocations, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetIamPolicy, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + OrganizationVpcFlowLogsServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseSetIamPolicy, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + OrganizationVpcFlowLogsServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseTestIamPermissions, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash( + "OrganizationVpcFlowLogsServiceRestTransport.TestIamPermissions" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, 
strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCancelOperation, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteOperation, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetOperation, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + OrganizationVpcFlowLogsServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListOperations, + OrganizationVpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("OrganizationVpcFlowLogsServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. 
+ """ + + http_options = ( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = OrganizationVpcFlowLogsServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.OrganizationVpcFlowLogsServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("OrganizationVpcFlowLogsServiceRestTransport",) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/rest_base.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/rest_base.py new file mode 100644 index 000000000000..3ebe0ab2362a --- /dev/null +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/organization_vpc_flow_logs_service/transports/rest_base.py @@ -0,0 +1,624 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.network_management_v1.types import vpc_flow_logs, vpc_flow_logs_config + +from .base import DEFAULT_CLIENT_INFO, OrganizationVpcFlowLogsServiceTransport + + +class _BaseOrganizationVpcFlowLogsServiceRestTransport( + OrganizationVpcFlowLogsServiceTransport +): + """Base REST backend transport for OrganizationVpcFlowLogsService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "networkmanagement.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'networkmanagement.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateVpcFlowLogsConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "vpcFlowLogsConfigId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=organizations/*/locations/*}/vpcFlowLogsConfigs", + "body": "vpc_flow_logs_config", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseCreateVpcFlowLogsConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteVpcFlowLogsConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/vpcFlowLogsConfigs/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseDeleteVpcFlowLogsConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetVpcFlowLogsConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/vpcFlowLogsConfigs/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = vpc_flow_logs.GetVpcFlowLogsConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseGetVpcFlowLogsConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListVpcFlowLogsConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=organizations/*/locations/*}/vpcFlowLogsConfigs", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseListVpcFlowLogsConfigs._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateVpcFlowLogsConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{vpc_flow_logs_config.name=organizations/*/locations/*/vpcFlowLogsConfigs/*}", + "body": "vpc_flow_logs_config", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( 
+ transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseOrganizationVpcFlowLogsServiceRestTransport._BaseUpdateVpcFlowLogsConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/global/connectivityTests/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/global/connectivityTests/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return 
query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/global/connectivityTests/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/global/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/global/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/global/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/global}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseOrganizationVpcFlowLogsServiceRestTransport",) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest.py index 592b246da15c..75404206e40d 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest.py @@ -760,24 +760,41 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "uri": "/v1/{name=projects/*/locations/global/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, ], "google.longrunning.Operations.DeleteOperation": [ { "method": "delete", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.GetOperation": [ { "method": "get", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.ListOperations": [ { "method": "get", "uri": "/v1/{name=projects/*/locations/global}/operations", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, ], } diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest_base.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest_base.py index 1eae18d749e0..1ea3326be1c8 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest_base.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/reachability_service/transports/rest_base.py @@ -418,6 +418,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*/locations/*}", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}", + }, ] return http_options @@ -443,6 +447,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*}/locations", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*}/locations", + }, ] return http_options @@ -556,6 +564,11 @@ 
def _get_http_options(): "uri": "/v1/{name=projects/*/locations/global/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @@ -586,6 +599,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ] return http_options @@ -611,6 +628,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ] return http_options @@ -636,6 +657,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*/locations/global}/operations", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, ] return http_options diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/async_client.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/async_client.py index 6ea8cf690852..61e20f1ae8bf 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/async_client.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/async_client.py @@ -357,8 +357,14 @@ async def sample_list_vpc_flow_logs_configs(): request (Optional[Union[google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest, dict]]): The request object. Request for the ``ListVpcFlowLogsConfigs`` method. parent (:class:`str`): - Required. The parent resource of the VpcFlowLogsConfig: - ``projects/{project_id}/locations/global`` + Required. The parent resource of the VpcFlowLogsConfig, + in one of the following formats: + + - For project-level resourcs: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -482,9 +488,14 @@ async def sample_get_vpc_flow_logs_config(): request (Optional[Union[google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest, dict]]): The request object. Request for the ``GetVpcFlowLogsConfig`` method. name (:class:`str`): - Required. ``VpcFlowLogsConfig`` resource name using the - form: - ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`` + Required. The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -571,11 +582,11 @@ async def create_vpc_flow_logs_config( exact same settings already exists (even if the ID is different), the creation fails. Notes: - 1. Creating a configuration with state=DISABLED will fail - 2. 
The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - creating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -600,7 +611,7 @@ async def sample_create_vpc_flow_logs_config(): # Initialize request argument(s) vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.CreateVpcFlowLogsConfigRequest( parent="parent_value", @@ -622,9 +633,14 @@ async def sample_create_vpc_flow_logs_config(): request (Optional[Union[google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest, dict]]): The request object. Request for the ``CreateVpcFlowLogsConfig`` method. parent (:class:`str`): - Required. The parent resource of the VPC Flow Logs - configuration to create: - ``projects/{project_id}/locations/global`` + Required. The parent resource of the VpcFlowLogsConfig + to create, in one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -735,11 +751,11 @@ async def update_vpc_flow_logs_config( with the exact same settings already exists (even if the ID is different), the creation fails. Notes: - 1. Updating a configuration with state=DISABLED will fail. - 2. The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - updating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Updating a configuration with ``state=DISABLED`` will fail. + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -764,7 +780,7 @@ async def sample_update_vpc_flow_logs_config(): # Initialize request argument(s) vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.UpdateVpcFlowLogsConfigRequest( vpc_flow_logs_config=vpc_flow_logs_config, @@ -791,9 +807,12 @@ async def sample_update_vpc_flow_logs_config(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. Mask of fields to update. - At least one path must be supplied in - this field. + Required. Mask of fields to update. At least one path + must be supplied in this field. 
For example, to change + the state of the configuration to ENABLED, specify + ``update_mask`` = ``"state"``, and the + ``vpc_flow_logs_config`` would be: + ``vpc_flow_logs_config = { name = "projects/my-project/locations/global/vpcFlowLogsConfigs/my-config" state = "ENABLED" }`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -923,9 +942,14 @@ async def sample_delete_vpc_flow_logs_config(): request (Optional[Union[google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest, dict]]): The request object. Request for the ``DeleteVpcFlowLogsConfig`` method. name (:class:`str`): - Required. ``VpcFlowLogsConfig`` resource name using the - form: - ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`` + Required. The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For a project-level resource: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For an organization-level resource: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1011,6 +1035,213 @@ async def sample_delete_vpc_flow_logs_config(): # Done; return the response. return response + async def query_org_vpc_flow_logs_configs( + self, + request: Optional[ + Union[vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.QueryOrgVpcFlowLogsConfigsAsyncPager: + r"""QueryOrgVpcFlowLogsConfigs returns a list of all + organization-level VPC Flow Logs configurations + applicable to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + async def sample_query_org_vpc_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.QueryOrgVpcFlowLogsConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.query_org_vpc_flow_logs_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsRequest, dict]]): + The request object. Request for the ``QueryOrgVpcFlowLogsConfigs`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.QueryOrgVpcFlowLogsConfigsAsyncPager: + Response for the QueryVpcFlowLogsConfigs method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest): + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.query_org_vpc_flow_logs_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.QueryOrgVpcFlowLogsConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def show_effective_flow_logs_configs( + self, + request: Optional[ + Union[vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ShowEffectiveFlowLogsConfigsAsyncPager: + r"""ShowEffectiveFlowLogsConfigs returns a list of all + VPC Flow Logs configurations applicable to a specified + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + async def sample_show_effective_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.ShowEffectiveFlowLogsConfigsRequest( + parent="parent_value", + resource="resource_value", + ) + + # Make the request + page_result = client.show_effective_flow_logs_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsRequest, dict]]): + The request object. Request for the ``ShowEffectiveFlowLogsConfigs`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ShowEffectiveFlowLogsConfigsAsyncPager: + Response for the ShowEffectiveFlowLogsConfigs method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest): + request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.show_effective_flow_logs_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ShowEffectiveFlowLogsConfigsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/client.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/client.py index 3ebba2142421..96805eff82c0 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/client.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/client.py @@ -777,8 +777,14 @@ def sample_list_vpc_flow_logs_configs(): request (Union[google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest, dict]): The request object. Request for the ``ListVpcFlowLogsConfigs`` method. parent (str): - Required. The parent resource of the VpcFlowLogsConfig: - ``projects/{project_id}/locations/global`` + Required. The parent resource of the VpcFlowLogsConfig, + in one of the following formats: + + - For project-level resourcs: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -901,9 +907,14 @@ def sample_get_vpc_flow_logs_config(): request (Union[google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest, dict]): The request object. Request for the ``GetVpcFlowLogsConfig`` method. name (str): - Required. ``VpcFlowLogsConfig`` resource name using the - form: - ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`` + Required. 
The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -987,11 +998,11 @@ def create_vpc_flow_logs_config( exact same settings already exists (even if the ID is different), the creation fails. Notes: - 1. Creating a configuration with state=DISABLED will fail - 2. The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - creating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -1016,7 +1027,7 @@ def sample_create_vpc_flow_logs_config(): # Initialize request argument(s) vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.CreateVpcFlowLogsConfigRequest( parent="parent_value", @@ -1038,9 +1049,14 @@ def sample_create_vpc_flow_logs_config(): request (Union[google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest, dict]): The request object. Request for the ``CreateVpcFlowLogsConfig`` method. parent (str): - Required. The parent resource of the VPC Flow Logs - configuration to create: - ``projects/{project_id}/locations/global`` + Required. The parent resource of the VpcFlowLogsConfig + to create, in one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1150,11 +1166,11 @@ def update_vpc_flow_logs_config( with the exact same settings already exists (even if the ID is different), the creation fails. Notes: - 1. Updating a configuration with state=DISABLED will fail. - 2. The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - updating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Updating a configuration with ``state=DISABLED`` will fail. + 2. 
The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -1179,7 +1195,7 @@ def sample_update_vpc_flow_logs_config(): # Initialize request argument(s) vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.UpdateVpcFlowLogsConfigRequest( vpc_flow_logs_config=vpc_flow_logs_config, @@ -1206,9 +1222,12 @@ def sample_update_vpc_flow_logs_config(): on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. - At least one path must be supplied in - this field. + Required. Mask of fields to update. At least one path + must be supplied in this field. For example, to change + the state of the configuration to ENABLED, specify + ``update_mask`` = ``"state"``, and the + ``vpc_flow_logs_config`` would be: + ``vpc_flow_logs_config = { name = "projects/my-project/locations/global/vpcFlowLogsConfigs/my-config" state = "ENABLED" }`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1337,9 +1356,14 @@ def sample_delete_vpc_flow_logs_config(): request (Union[google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest, dict]): The request object. Request for the ``DeleteVpcFlowLogsConfig`` method. name (str): - Required. ``VpcFlowLogsConfig`` resource name using the - form: - ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`` + Required. The resource name of the VpcFlowLogsConfig, in + one of the following formats: + + - For a project-level resource: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For an organization-level resource: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1424,6 +1448,213 @@ def sample_delete_vpc_flow_logs_config(): # Done; return the response. return response + def query_org_vpc_flow_logs_configs( + self, + request: Optional[ + Union[vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.QueryOrgVpcFlowLogsConfigsPager: + r"""QueryOrgVpcFlowLogsConfigs returns a list of all + organization-level VPC Flow Logs configurations + applicable to the specified project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + def sample_query_org_vpc_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.QueryOrgVpcFlowLogsConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.query_org_vpc_flow_logs_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsRequest, dict]): + The request object. Request for the ``QueryOrgVpcFlowLogsConfigs`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.QueryOrgVpcFlowLogsConfigsPager: + Response for the QueryVpcFlowLogsConfigs method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest): + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.query_org_vpc_flow_logs_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.QueryOrgVpcFlowLogsConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def show_effective_flow_logs_configs( + self, + request: Optional[ + Union[vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ShowEffectiveFlowLogsConfigsPager: + r"""ShowEffectiveFlowLogsConfigs returns a list of all + VPC Flow Logs configurations applicable to a specified + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_management_v1 + + def sample_show_effective_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.ShowEffectiveFlowLogsConfigsRequest( + parent="parent_value", + resource="resource_value", + ) + + # Make the request + page_result = client.show_effective_flow_logs_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsRequest, dict]): + The request object. Request for the ``ShowEffectiveFlowLogsConfigs`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ShowEffectiveFlowLogsConfigsPager: + Response for the ShowEffectiveFlowLogsConfigs method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest): + request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.show_effective_flow_logs_configs + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ShowEffectiveFlowLogsConfigsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "VpcFlowLogsServiceClient": return self diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/pagers.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/pagers.py index 6502a505d0ce..d9a5c6117ec5 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/pagers.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/pagers.py @@ -197,3 +197,325 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class QueryOrgVpcFlowLogsConfigsPager: + """A pager for iterating through ``query_org_vpc_flow_logs_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``vpc_flow_logs_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``QueryOrgVpcFlowLogsConfigs`` requests and continue to iterate + through the ``vpc_flow_logs_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse], + request: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, + response: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsRequest): + The initial request object. + response (google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[vpc_flow_logs_config.VpcFlowLogsConfig]: + for page in self.pages: + yield from page.vpc_flow_logs_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class QueryOrgVpcFlowLogsConfigsAsyncPager: + """A pager for iterating through ``query_org_vpc_flow_logs_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``vpc_flow_logs_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``QueryOrgVpcFlowLogsConfigs`` requests and continue to iterate + through the ``vpc_flow_logs_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse] + ], + request: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, + response: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsRequest): + The initial request object. + response (google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[vpc_flow_logs_config.VpcFlowLogsConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.vpc_flow_logs_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ShowEffectiveFlowLogsConfigsPager: + """A pager for iterating through ``show_effective_flow_logs_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``effective_flow_logs_configs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ShowEffectiveFlowLogsConfigs`` requests and continue to iterate + through the ``effective_flow_logs_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse], + request: vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, + response: vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsRequest): + The initial request object. + response (google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[vpc_flow_logs_config.EffectiveVpcFlowLogsConfig]: + for page in self.pages: + yield from page.effective_flow_logs_configs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ShowEffectiveFlowLogsConfigsAsyncPager: + """A pager for iterating through ``show_effective_flow_logs_configs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``effective_flow_logs_configs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ShowEffectiveFlowLogsConfigs`` requests and continue to iterate + through the ``effective_flow_logs_configs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse] + ], + request: vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, + response: vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsRequest): + The initial request object. + response (google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[vpc_flow_logs_config.EffectiveVpcFlowLogsConfig]: + async def async_generator(): + async for page in self.pages: + for response in page.effective_flow_logs_configs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/base.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/base.py index 8ae5943ee186..46f7577ed551 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/base.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/base.py @@ -162,6 +162,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.query_org_vpc_flow_logs_configs: gapic_v1.method.wrap_method( + self.query_org_vpc_flow_logs_configs, + default_timeout=None, + client_info=client_info, + ), + self.show_effective_flow_logs_configs: gapic_v1.method.wrap_method( + self.show_effective_flow_logs_configs, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -274,6 +284,30 @@ def delete_vpc_flow_logs_config( ]: raise NotImplementedError() + @property + def query_org_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest], + Union[ + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse, + Awaitable[vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse], + ], + ]: + raise NotImplementedError() + + @property + def show_effective_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest], + Union[ + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse, + Awaitable[vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc.py index af660c5faed1..856909cdbe8e 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc.py @@ -417,11 +417,11 @@ def create_vpc_flow_logs_config( exact same settings already exists (even if the ID 
is different), the creation fails. Notes: - 1. Creating a configuration with state=DISABLED will fail - 2. The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - creating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -461,11 +461,11 @@ def update_vpc_flow_logs_config( with the exact same settings already exists (even if the ID is different), the creation fails. Notes: - 1. Updating a configuration with state=DISABLED will fail. - 2. The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - updating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Updating a configuration with ``state=DISABLED`` will fail. + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -523,6 +523,74 @@ def delete_vpc_flow_logs_config( ) return self._stubs["delete_vpc_flow_logs_config"] + @property + def query_org_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest], + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse, + ]: + r"""Return a callable for the query org vpc flow logs + configs method over gRPC. + + QueryOrgVpcFlowLogsConfigs returns a list of all + organization-level VPC Flow Logs configurations + applicable to the specified project. + + Returns: + Callable[[~.QueryOrgVpcFlowLogsConfigsRequest], + ~.QueryOrgVpcFlowLogsConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_org_vpc_flow_logs_configs" not in self._stubs: + self._stubs[ + "query_org_vpc_flow_logs_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.VpcFlowLogsService/QueryOrgVpcFlowLogsConfigs", + request_serializer=vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest.serialize, + response_deserializer=vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.deserialize, + ) + return self._stubs["query_org_vpc_flow_logs_configs"] + + @property + def show_effective_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest], + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse, + ]: + r"""Return a callable for the show effective flow logs + configs method over gRPC. + + ShowEffectiveFlowLogsConfigs returns a list of all + VPC Flow Logs configurations applicable to a specified + resource. + + Returns: + Callable[[~.ShowEffectiveFlowLogsConfigsRequest], + ~.ShowEffectiveFlowLogsConfigsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
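# Illustrative sketch (hedged, not part of the diff): the transport properties above
# follow the usual lazy-stub pattern. A minimal, generic version with hypothetical
# names, just to make the caching behaviour explicit:
class _LazyStubExample:
    def __init__(self, channel):
        self._channel = channel
        self._stubs = {}

    @property
    def my_unary_call(self):
        # The gRPC callable is created on first access and then reused, so repeated
        # property lookups do not recreate serializers or stubs.
        if "my_unary_call" not in self._stubs:
            self._stubs["my_unary_call"] = self._channel.unary_unary(
                "/example.v1.ExampleService/MyUnaryCall"
            )
        return self._stubs["my_unary_call"]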
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "show_effective_flow_logs_configs" not in self._stubs: + self._stubs[ + "show_effective_flow_logs_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.VpcFlowLogsService/ShowEffectiveFlowLogsConfigs", + request_serializer=vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest.serialize, + response_deserializer=vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.deserialize, + ) + return self._stubs["show_effective_flow_logs_configs"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc_asyncio.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc_asyncio.py index ceeb63b4c39f..3f7070f95162 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/grpc_asyncio.py @@ -426,11 +426,11 @@ def create_vpc_flow_logs_config( exact same settings already exists (even if the ID is different), the creation fails. Notes: - 1. Creating a configuration with state=DISABLED will fail - 2. The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - creating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Creating a configuration with ``state=DISABLED`` will fail + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - creating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -471,11 +471,11 @@ def update_vpc_flow_logs_config( with the exact same settings already exists (even if the ID is different), the creation fails. Notes: - 1. Updating a configuration with state=DISABLED will fail. - 2. The following fields are not considered as ``settings`` for - the purpose of the check mentioned above, therefore - - updating another configuration with the same fields but - different values for the following fields will fail as well: + 1. Updating a configuration with ``state=DISABLED`` will fail. + 2. The following fields are not considered as settings for the + purpose of the check mentioned above, therefore - updating + another configuration with the same fields but different + values for the following fields will fail as well: - name - create_time @@ -534,6 +534,74 @@ def delete_vpc_flow_logs_config( ) return self._stubs["delete_vpc_flow_logs_config"] + @property + def query_org_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest], + Awaitable[vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse], + ]: + r"""Return a callable for the query org vpc flow logs + configs method over gRPC. + + QueryOrgVpcFlowLogsConfigs returns a list of all + organization-level VPC Flow Logs configurations + applicable to the specified project. + + Returns: + Callable[[~.QueryOrgVpcFlowLogsConfigsRequest], + Awaitable[~.QueryOrgVpcFlowLogsConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "query_org_vpc_flow_logs_configs" not in self._stubs: + self._stubs[ + "query_org_vpc_flow_logs_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.VpcFlowLogsService/QueryOrgVpcFlowLogsConfigs", + request_serializer=vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest.serialize, + response_deserializer=vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.deserialize, + ) + return self._stubs["query_org_vpc_flow_logs_configs"] + + @property + def show_effective_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest], + Awaitable[vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse], + ]: + r"""Return a callable for the show effective flow logs + configs method over gRPC. + + ShowEffectiveFlowLogsConfigs returns a list of all + VPC Flow Logs configurations applicable to a specified + resource. + + Returns: + Callable[[~.ShowEffectiveFlowLogsConfigsRequest], + Awaitable[~.ShowEffectiveFlowLogsConfigsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "show_effective_flow_logs_configs" not in self._stubs: + self._stubs[ + "show_effective_flow_logs_configs" + ] = self._logged_channel.unary_unary( + "/google.cloud.networkmanagement.v1.VpcFlowLogsService/ShowEffectiveFlowLogsConfigs", + request_serializer=vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest.serialize, + response_deserializer=vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.deserialize, + ) + return self._stubs["show_effective_flow_logs_configs"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -562,6 +630,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.query_org_vpc_flow_logs_configs: self._wrap_method( + self.query_org_vpc_flow_logs_configs, + default_timeout=None, + client_info=client_info, + ), + self.show_effective_flow_logs_configs: self._wrap_method( + self.show_effective_flow_logs_configs, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest.py index 9395f76f55eb..140d14e2c7e3 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest.py @@ -108,6 +108,22 @@ def post_list_vpc_flow_logs_configs(self, response): logging.log(f"Received response: {response}") return response + def pre_query_org_vpc_flow_logs_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_org_vpc_flow_logs_configs(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_show_effective_flow_logs_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_show_effective_flow_logs_configs(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_vpc_flow_logs_config(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -323,6 +339,110 @@ def post_list_vpc_flow_logs_configs_with_metadata( """ return response, metadata + def pre_query_org_vpc_flow_logs_configs( + self, + request: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for query_org_vpc_flow_logs_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpcFlowLogsService server. + """ + return request, metadata + + def post_query_org_vpc_flow_logs_configs( + self, response: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse + ) -> vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse: + """Post-rpc interceptor for query_org_vpc_flow_logs_configs + + DEPRECATED. Please use the `post_query_org_vpc_flow_logs_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the VpcFlowLogsService server but before + it is returned to user code. This `post_query_org_vpc_flow_logs_configs` interceptor runs + before the `post_query_org_vpc_flow_logs_configs_with_metadata` interceptor. + """ + return response + + def post_query_org_vpc_flow_logs_configs_with_metadata( + self, + response: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for query_org_vpc_flow_logs_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpcFlowLogsService server but before it is returned to user code. + + We recommend only using this `post_query_org_vpc_flow_logs_configs_with_metadata` + interceptor in new development instead of the `post_query_org_vpc_flow_logs_configs` interceptor. + When both interceptors are used, this `post_query_org_vpc_flow_logs_configs_with_metadata` interceptor runs after the + `post_query_org_vpc_flow_logs_configs` interceptor. The (possibly modified) response returned by + `post_query_org_vpc_flow_logs_configs` will be passed to + `post_query_org_vpc_flow_logs_configs_with_metadata`. + """ + return response, metadata + + def pre_show_effective_flow_logs_configs( + self, + request: vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for show_effective_flow_logs_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the VpcFlowLogsService server. + """ + return request, metadata + + def post_show_effective_flow_logs_configs( + self, response: vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse + ) -> vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse: + """Post-rpc interceptor for show_effective_flow_logs_configs + + DEPRECATED. 
Please use the `post_show_effective_flow_logs_configs_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the VpcFlowLogsService server but before + it is returned to user code. This `post_show_effective_flow_logs_configs` interceptor runs + before the `post_show_effective_flow_logs_configs_with_metadata` interceptor. + """ + return response + + def post_show_effective_flow_logs_configs_with_metadata( + self, + response: vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for show_effective_flow_logs_configs + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the VpcFlowLogsService server but before it is returned to user code. + + We recommend only using this `post_show_effective_flow_logs_configs_with_metadata` + interceptor in new development instead of the `post_show_effective_flow_logs_configs` interceptor. + When both interceptors are used, this `post_show_effective_flow_logs_configs_with_metadata` interceptor runs after the + `post_show_effective_flow_logs_configs` interceptor. The (possibly modified) response returned by + `post_show_effective_flow_logs_configs` will be passed to + `post_show_effective_flow_logs_configs_with_metadata`. + """ + return response, metadata + def pre_update_vpc_flow_logs_config( self, request: vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, @@ -698,24 +818,41 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "uri": "/v1/{name=projects/*/locations/global/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, ], "google.longrunning.Operations.DeleteOperation": [ { "method": "delete", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.GetOperation": [ { "method": "get", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.ListOperations": [ { "method": "get", "uri": "/v1/{name=projects/*/locations/global}/operations", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, ], } @@ -1348,6 +1485,318 @@ def __call__( ) return resp + class _QueryOrgVpcFlowLogsConfigs( + _BaseVpcFlowLogsServiceRestTransport._BaseQueryOrgVpcFlowLogsConfigs, + VpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("VpcFlowLogsServiceRestTransport.QueryOrgVpcFlowLogsConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse: + r"""Call the query org vpc flow logs + configs method over HTTP. + + Args: + request (~.vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest): + The request object. Request for the ``QueryOrgVpcFlowLogsConfigs`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse: + Response for the ``QueryVpcFlowLogsConfigs`` method. + """ + + http_options = ( + _BaseVpcFlowLogsServiceRestTransport._BaseQueryOrgVpcFlowLogsConfigs._get_http_options() + ) + + request, metadata = self._interceptor.pre_query_org_vpc_flow_logs_configs( + request, metadata + ) + transcoded_request = _BaseVpcFlowLogsServiceRestTransport._BaseQueryOrgVpcFlowLogsConfigs._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseVpcFlowLogsServiceRestTransport._BaseQueryOrgVpcFlowLogsConfigs._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.VpcFlowLogsServiceClient.QueryOrgVpcFlowLogsConfigs", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "rpcName": "QueryOrgVpcFlowLogsConfigs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = VpcFlowLogsServiceRestTransport._QueryOrgVpcFlowLogsConfigs._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
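# Illustrative sketch (hedged, not part of the diff): wiring a custom REST interceptor
# that overrides the new hooks into the client. It assumes the interceptor and transport
# classes are importable from the generated module path shown in this diff; the print
# bodies are placeholders.
from google.cloud import network_management_v1
from google.cloud.network_management_v1.services.vpc_flow_logs_service.transports.rest import (
    VpcFlowLogsServiceRestInterceptor,
    VpcFlowLogsServiceRestTransport,
)


class LoggingInterceptor(VpcFlowLogsServiceRestInterceptor):
    def pre_query_org_vpc_flow_logs_configs(self, request, metadata):
        # Runs before the HTTP request is sent; may rewrite request or metadata.
        print(f"querying org configs under {request.parent}")
        return request, metadata

    def post_query_org_vpc_flow_logs_configs_with_metadata(self, response, metadata):
        # Preferred over post_query_org_vpc_flow_logs_configs (see the docstrings above).
        print(f"received {len(response.vpc_flow_logs_configs)} configs")
        return response, metadata


transport = VpcFlowLogsServiceRestTransport(interceptor=LoggingInterceptor())
client = network_management_v1.VpcFlowLogsServiceClient(transport=transport)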
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse() + pb_resp = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_query_org_vpc_flow_logs_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_query_org_vpc_flow_logs_configs_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.VpcFlowLogsServiceClient.query_org_vpc_flow_logs_configs", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "rpcName": "QueryOrgVpcFlowLogsConfigs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ShowEffectiveFlowLogsConfigs( + _BaseVpcFlowLogsServiceRestTransport._BaseShowEffectiveFlowLogsConfigs, + VpcFlowLogsServiceRestStub, + ): + def __hash__(self): + return hash("VpcFlowLogsServiceRestTransport.ShowEffectiveFlowLogsConfigs") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse: + r"""Call the show effective flow logs + configs method over HTTP. + + Args: + request (~.vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest): + The request object. Request for the ``ShowEffectiveFlowLogsConfigs`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse: + Response for the ``ShowEffectiveFlowLogsConfigs`` + method. 
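# Illustrative sketch (hedged, not part of the diff): the REST __call__ maps non-2xx
# responses through core_exceptions.from_http_response, so callers see the familiar
# google.api_core exception hierarchy. Parent and resource values are placeholders.
from google.api_core import exceptions as core_exceptions
from google.cloud import network_management_v1

client = network_management_v1.VpcFlowLogsServiceClient(transport="rest")
try:
    client.show_effective_flow_logs_configs(
        request={
            "parent": "projects/my-project/locations/global",
            "resource": "my-resource",  # placeholder
        }
    )
except core_exceptions.GoogleAPICallError as exc:
    # e.g. HTTP 403 surfaces as PermissionDenied, 404 as NotFound, and so on.
    print(f"call failed: {exc}")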
+ + """ + + http_options = ( + _BaseVpcFlowLogsServiceRestTransport._BaseShowEffectiveFlowLogsConfigs._get_http_options() + ) + + request, metadata = self._interceptor.pre_show_effective_flow_logs_configs( + request, metadata + ) + transcoded_request = _BaseVpcFlowLogsServiceRestTransport._BaseShowEffectiveFlowLogsConfigs._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseVpcFlowLogsServiceRestTransport._BaseShowEffectiveFlowLogsConfigs._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkmanagement_v1.VpcFlowLogsServiceClient.ShowEffectiveFlowLogsConfigs", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "rpcName": "ShowEffectiveFlowLogsConfigs", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = VpcFlowLogsServiceRestTransport._ShowEffectiveFlowLogsConfigs._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse() + pb_resp = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_show_effective_flow_logs_configs(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_show_effective_flow_logs_configs_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkmanagement_v1.VpcFlowLogsServiceClient.show_effective_flow_logs_configs", + extra={ + "serviceName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "rpcName": "ShowEffectiveFlowLogsConfigs", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _UpdateVpcFlowLogsConfig( _BaseVpcFlowLogsServiceRestTransport._BaseUpdateVpcFlowLogsConfig, VpcFlowLogsServiceRestStub, @@ -1547,6 +1996,28 @@ def list_vpc_flow_logs_configs( # In C++ this would require a dynamic_cast return self._ListVpcFlowLogsConfigs(self._session, self._host, self._interceptor) # type: ignore + @property + def query_org_vpc_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest], + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough 
to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryOrgVpcFlowLogsConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def show_effective_flow_logs_configs( + self, + ) -> Callable[ + [vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest], + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ShowEffectiveFlowLogsConfigs(self._session, self._host, self._interceptor) # type: ignore + @property def update_vpc_flow_logs_config( self, diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest_base.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest_base.py index 3f6ad8490cb1..d72408b47fae 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest_base.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/services/vpc_flow_logs_service/transports/rest_base.py @@ -291,6 +291,102 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseQueryOrgVpcFlowLogsConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/vpcFlowLogsConfigs:queryOrgVpcFlowLogsConfigs", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseVpcFlowLogsServiceRestTransport._BaseQueryOrgVpcFlowLogsConfigs._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseShowEffectiveFlowLogsConfigs: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "resource": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/vpcFlowLogsConfigs:showEffectiveFlowLogsConfigs", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + 
return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseVpcFlowLogsServiceRestTransport._BaseShowEffectiveFlowLogsConfigs._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateVpcFlowLogsConfig: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -361,6 +457,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*/locations/*}", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}", + }, ] return http_options @@ -386,6 +486,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*}/locations", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*}/locations", + }, ] return http_options @@ -499,6 +603,11 @@ def _get_http_options(): "uri": "/v1/{name=projects/*/locations/global/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @@ -529,6 +638,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ] return http_options @@ -554,6 +667,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*/locations/global/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, ] return http_options @@ -579,6 +696,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=projects/*/locations/global}/operations", }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, ] return http_options diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/__init__.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/__init__.py index f16fd271419d..cb0c83f545d3 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/__init__.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/__init__.py @@ -46,7 +46,9 @@ ForwardingRuleInfo, GKEMasterInfo, GoogleServiceInfo, + HybridSubnetInfo, InstanceInfo, + InterconnectAttachmentInfo, LoadBalancerBackend, LoadBalancerBackendInfo, LoadBalancerInfo, @@ -72,9 +74,13 @@ GetVpcFlowLogsConfigRequest, ListVpcFlowLogsConfigsRequest, ListVpcFlowLogsConfigsResponse, + QueryOrgVpcFlowLogsConfigsRequest, + QueryOrgVpcFlowLogsConfigsResponse, + ShowEffectiveFlowLogsConfigsRequest, + ShowEffectiveFlowLogsConfigsResponse, UpdateVpcFlowLogsConfigRequest, ) -from .vpc_flow_logs_config import VpcFlowLogsConfig +from .vpc_flow_logs_config import EffectiveVpcFlowLogsConfig, VpcFlowLogsConfig __all__ = ( "ConnectivityTest", @@ -105,7 +111,9 @@ "ForwardingRuleInfo", "GKEMasterInfo", "GoogleServiceInfo", + "HybridSubnetInfo", "InstanceInfo", + "InterconnectAttachmentInfo", "LoadBalancerBackend", "LoadBalancerBackendInfo", "LoadBalancerInfo", @@ -129,6 +137,11 @@ "GetVpcFlowLogsConfigRequest", "ListVpcFlowLogsConfigsRequest", "ListVpcFlowLogsConfigsResponse", + "QueryOrgVpcFlowLogsConfigsRequest", + "QueryOrgVpcFlowLogsConfigsResponse", + 
"ShowEffectiveFlowLogsConfigsRequest", + "ShowEffectiveFlowLogsConfigsResponse", "UpdateVpcFlowLogsConfigRequest", + "EffectiveVpcFlowLogsConfig", "VpcFlowLogsConfig", ) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py index 6524ff13563b..a343b8b88a42 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/connectivity_test.py @@ -362,11 +362,12 @@ class CloudRunRevisionEndpoint(proto.Message): A `Cloud Run `__ `revision `__ URI. The format is: - ``projects/{project}/locations/{location}/revisions/{revision}`` + projects/{project}/locations/{location}/revisions/{revision} service_uri (str): - Output only. The URI of the Cloud Run service that the - revision belongs to. The format is: - ``projects/{project}/locations/{location}/services/{service}`` + Output only. The URI of the Cloud Run service + that the revision belongs to. The format is: + + projects/{project}/locations/{location}/services/{service} """ uri: str = proto.Field( @@ -616,13 +617,17 @@ class ProbingDetails(proto.Message): direction: from the source to the destination endpoint. destination_egress_location (google.cloud.network_management_v1.types.ProbingDetails.EdgeLocation): - The EdgeLocation from which a packet destined - for/originating from the internet will egress/ingress the - Google network. This will only be populated for a - connectivity test which has an internet destination/source - address. The absence of this field *must not* be used as an - indication that the destination/source is part of the Google - network. + The EdgeLocation from which a packet, destined to the + internet, will egress the Google network. This will only be + populated for a connectivity test which has an internet + destination address. The absence of this field *must not* be + used as an indication that the destination is part of the + Google network. + edge_responses (MutableSequence[google.cloud.network_management_v1.types.ProbingDetails.SingleEdgeResponse]): + Probing results for all edge devices. + probed_all_devices (bool): + Whether all relevant edge devices were + probed. """ class ProbingResult(proto.Enum): @@ -685,6 +690,63 @@ class EdgeLocation(proto.Message): number=1, ) + class SingleEdgeResponse(proto.Message): + r"""Probing results for a single edge device. + + Attributes: + result (google.cloud.network_management_v1.types.ProbingDetails.ProbingResult): + The overall result of active probing for this + egress device. + sent_probe_count (int): + Number of probes sent. + successful_probe_count (int): + Number of probes that reached the + destination. + probing_latency (google.cloud.network_management_v1.types.LatencyDistribution): + Latency as measured by active probing in one + direction: from the source to the destination + endpoint. + destination_egress_location (google.cloud.network_management_v1.types.ProbingDetails.EdgeLocation): + The EdgeLocation from which a packet, destined to the + internet, will egress the Google network. This will only be + populated for a connectivity test which has an internet + destination address. The absence of this field *must not* be + used as an indication that the destination is part of the + Google network. 
+ destination_router (str): + Router name in the format + '{router}.{metroshard}'. For example: + pf01.aaa01, pr02.aaa01. + """ + + result: "ProbingDetails.ProbingResult" = proto.Field( + proto.ENUM, + number=1, + enum="ProbingDetails.ProbingResult", + ) + sent_probe_count: int = proto.Field( + proto.INT32, + number=2, + ) + successful_probe_count: int = proto.Field( + proto.INT32, + number=3, + ) + probing_latency: "LatencyDistribution" = proto.Field( + proto.MESSAGE, + number=4, + message="LatencyDistribution", + ) + destination_egress_location: "ProbingDetails.EdgeLocation" = proto.Field( + proto.MESSAGE, + number=5, + message="ProbingDetails.EdgeLocation", + ) + destination_router: str = proto.Field( + proto.STRING, + number=6, + ) + result: ProbingResult = proto.Field( proto.ENUM, number=1, @@ -728,6 +790,15 @@ class EdgeLocation(proto.Message): number=9, message=EdgeLocation, ) + edge_responses: MutableSequence[SingleEdgeResponse] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=SingleEdgeResponse, + ) + probed_all_devices: bool = proto.Field( + proto.BOOL, + number=11, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py index 46461d78fb5a..feca905f13a2 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py @@ -33,8 +33,10 @@ "ForwardingRuleInfo", "LoadBalancerInfo", "LoadBalancerBackend", + "HybridSubnetInfo", "VpnGatewayInfo", "VpnTunnelInfo", + "InterconnectAttachmentInfo", "EndpointInfo", "DeliverInfo", "ForwardInfo", @@ -215,6 +217,10 @@ class Step(proto.Message): Display information of a Compute Engine forwarding rule. + This field is a member of `oneof`_ ``step_info``. + hybrid_subnet (google.cloud.network_management_v1.types.HybridSubnetInfo): + Display information of a hybrid subnet. + This field is a member of `oneof`_ ``step_info``. vpn_gateway (google.cloud.network_management_v1.types.VpnGatewayInfo): Display information of a Compute Engine VPN @@ -225,6 +231,11 @@ class Step(proto.Message): Display information of a Compute Engine VPN tunnel. + This field is a member of `oneof`_ ``step_info``. + interconnect_attachment (google.cloud.network_management_v1.types.InterconnectAttachmentInfo): + Display information of an interconnect + attachment. + This field is a member of `oneof`_ ``step_info``. vpc_connector (google.cloud.network_management_v1.types.VpcConnectorInfo): Display information of a VPC connector. @@ -411,21 +422,24 @@ class State(proto.Enum): Forwarding state: arriving at a Compute Engine instance. ARRIVE_AT_INTERNAL_LOAD_BALANCER (10): - Forwarding state: arriving at a Compute Engine internal load - balancer. Deprecated in favor of the - ``ANALYZE_LOAD_BALANCER_BACKEND`` state, not used in new - tests. + Forwarding state: arriving at a Compute + Engine internal load balancer. ARRIVE_AT_EXTERNAL_LOAD_BALANCER (11): - Forwarding state: arriving at a Compute Engine external load - balancer. Deprecated in favor of the - ``ANALYZE_LOAD_BALANCER_BACKEND`` state, not used in new - tests. + Forwarding state: arriving at a Compute + Engine external load balancer. + ARRIVE_AT_HYBRID_SUBNET (38): + Forwarding state: arriving at a hybrid + subnet. Appropriate routing configuration will + be determined here. 
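# Illustrative sketch (hedged, not part of the diff): reading the new per-edge probing
# results added above. The test name is a placeholder and field access assumes the
# message shapes in this diff.
from google.cloud import network_management_v1

client = network_management_v1.ReachabilityServiceClient()
test = client.get_connectivity_test(
    name="projects/my-project/locations/global/connectivityTests/my-test"
)
details = test.probing_details
# probed_all_devices says whether edge_responses covers every relevant edge device;
# each SingleEdgeResponse carries its own result, probe counts, and latency.
print("probed all devices:", details.probed_all_devices)
for edge in details.edge_responses:
    print(edge.destination_router, edge.result, edge.successful_probe_count)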
ARRIVE_AT_VPN_GATEWAY (12): Forwarding state: arriving at a Cloud VPN gateway. ARRIVE_AT_VPN_TUNNEL (13): Forwarding state: arriving at a Cloud VPN tunnel. + ARRIVE_AT_INTERCONNECT_ATTACHMENT (37): + Forwarding state: arriving at an interconnect + attachment. ARRIVE_AT_VPC_CONNECTOR (24): Forwarding state: arriving at a VPC connector. @@ -438,7 +452,8 @@ class State(proto.Enum): from a serverless endpoint forwarded through public (external) connectivity. NAT (14): - Transition state: packet header translated. + Transition state: packet header translated. The ``nat`` + field is populated with the translation information. PROXY_CONNECTION (15): Transition state: original connection is terminated and a new proxied connection is @@ -481,8 +496,10 @@ class State(proto.Enum): ARRIVE_AT_INSTANCE = 9 ARRIVE_AT_INTERNAL_LOAD_BALANCER = 10 ARRIVE_AT_EXTERNAL_LOAD_BALANCER = 11 + ARRIVE_AT_HYBRID_SUBNET = 38 ARRIVE_AT_VPN_GATEWAY = 12 ARRIVE_AT_VPN_TUNNEL = 13 + ARRIVE_AT_INTERCONNECT_ATTACHMENT = 37 ARRIVE_AT_VPC_CONNECTOR = 24 DIRECT_VPC_EGRESS_CONNECTION = 35 SERVERLESS_EXTERNAL_CONNECTION = 36 @@ -547,6 +564,12 @@ class State(proto.Enum): oneof="step_info", message="ForwardingRuleInfo", ) + hybrid_subnet: "HybridSubnetInfo" = proto.Field( + proto.MESSAGE, + number=36, + oneof="step_info", + message="HybridSubnetInfo", + ) vpn_gateway: "VpnGatewayInfo" = proto.Field( proto.MESSAGE, number=10, @@ -559,6 +582,12 @@ class State(proto.Enum): oneof="step_info", message="VpnTunnelInfo", ) + interconnect_attachment: "InterconnectAttachmentInfo" = proto.Field( + proto.MESSAGE, + number=35, + oneof="step_info", + message="InterconnectAttachmentInfo", + ) vpc_connector: "VpcConnectorInfo" = proto.Field( proto.MESSAGE, number=21, @@ -712,8 +741,30 @@ class InstanceInfo(proto.Message): psc_network_attachment_uri (str): URI of the PSC network attachment the NIC is attached to (if relevant). + running (bool): + Indicates whether the Compute Engine instance is running. + Deprecated: use the ``status`` field instead. + status (google.cloud.network_management_v1.types.InstanceInfo.Status): + The status of the instance. """ + class Status(proto.Enum): + r"""The status of the instance. We treat all states other than + "RUNNING" as not running. + + Values: + STATUS_UNSPECIFIED (0): + Default unspecified value. + RUNNING (1): + The instance is running. + NOT_RUNNING (2): + The instance has any status other than + "RUNNING". + """ + STATUS_UNSPECIFIED = 0 + RUNNING = 1 + NOT_RUNNING = 2 + display_name: str = proto.Field( proto.STRING, number=1, @@ -750,11 +801,20 @@ class InstanceInfo(proto.Message): proto.STRING, number=9, ) + running: bool = proto.Field( + proto.BOOL, + number=10, + ) + status: Status = proto.Field( + proto.ENUM, + number=11, + enum=Status, + ) class NetworkInfo(proto.Message): r"""For display only. Metadata associated with a Compute Engine - network. Next ID: 7 + network. Attributes: display_name (str): @@ -835,6 +895,13 @@ class FirewallInfo(proto.Message): rules. firewall_rule_type (google.cloud.network_management_v1.types.FirewallInfo.FirewallRuleType): The firewall rule's type. + policy_priority (int): + The priority of the firewall policy that this + rule is associated with. This field is not + applicable to VPC firewall rules and implied VPC + firewall rules. + target_type (google.cloud.network_management_v1.types.FirewallInfo.TargetType): + Target type of the firewall rule. 
""" class FirewallRuleType(proto.Enum): @@ -894,6 +961,24 @@ class FirewallRuleType(proto.Enum): TRACKING_STATE = 101 ANALYSIS_SKIPPED = 102 + class TargetType(proto.Enum): + r"""Target type of the firewall rule. + + Values: + TARGET_TYPE_UNSPECIFIED (0): + Target type is not specified. In this case we + treat the rule as applying to INSTANCES target + type. + INSTANCES (1): + Firewall rule applies to instances. + INTERNAL_MANAGED_LB (2): + Firewall rule applies to internal managed + load balancers. + """ + TARGET_TYPE_UNSPECIFIED = 0 + INSTANCES = 1 + INTERNAL_MANAGED_LB = 2 + display_name: str = proto.Field( proto.STRING, number=1, @@ -939,6 +1024,15 @@ class FirewallRuleType(proto.Enum): number=10, enum=FirewallRuleType, ) + policy_priority: int = proto.Field( + proto.INT32, + number=12, + ) + target_type: TargetType = proto.Field( + proto.ENUM, + number=13, + enum=TargetType, + ) class RouteInfo(proto.Message): @@ -1116,6 +1210,8 @@ class NextHopType(proto.Enum): happens when the user doesn't have permissions to the project where the next hop resource is located. + SECURE_WEB_PROXY_GATEWAY (13): + Next hop is Secure Web Proxy Gateway. """ NEXT_HOP_TYPE_UNSPECIFIED = 0 NEXT_HOP_IP = 1 @@ -1130,6 +1226,7 @@ class NextHopType(proto.Enum): NEXT_HOP_ILB = 10 NEXT_HOP_ROUTER_APPLIANCE = 11 NEXT_HOP_NCC_HUB = 12 + SECURE_WEB_PROXY_GATEWAY = 13 class RouteScope(proto.Enum): r"""Indicates where routes are applicable. @@ -1561,6 +1658,33 @@ class HealthCheckFirewallState(proto.Enum): ) +class HybridSubnetInfo(proto.Message): + r"""For display only. Metadata associated with a hybrid subnet. + + Attributes: + display_name (str): + Name of a hybrid subnet. + uri (str): + URI of a hybrid subnet. + region (str): + Name of a Google Cloud region where the + hybrid subnet is configured. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + region: str = proto.Field( + proto.STRING, + number=3, + ) + + class VpnGatewayInfo(proto.Message): r"""For display only. Metadata associated with a Compute Engine VPN gateway. @@ -1699,6 +1823,86 @@ class RoutingType(proto.Enum): ) +class InterconnectAttachmentInfo(proto.Message): + r"""For display only. Metadata associated with an Interconnect + attachment. + + Attributes: + display_name (str): + Name of an Interconnect attachment. + uri (str): + URI of an Interconnect attachment. + interconnect_uri (str): + URI of the Interconnect where the + Interconnect attachment is configured. + region (str): + Name of a Google Cloud region where the + Interconnect attachment is configured. + cloud_router_uri (str): + URI of the Cloud Router to be used for + dynamic routing. + type_ (google.cloud.network_management_v1.types.InterconnectAttachmentInfo.Type): + The type of interconnect attachment this is. + l2_attachment_matched_ip_address (str): + Appliance IP address that was matched for L2_DEDICATED + attachments. + """ + + class Type(proto.Enum): + r"""What type of interconnect attachment this is. + + Values: + TYPE_UNSPECIFIED (0): + Unspecified type. + DEDICATED (1): + Attachment to a dedicated interconnect. + PARTNER (2): + Attachment to a partner interconnect, created + by the customer. + PARTNER_PROVIDER (3): + Attachment to a partner interconnect, created + by the partner. + L2_DEDICATED (4): + Attachment to a L2 interconnect, created by + the customer. 
+ """ + TYPE_UNSPECIFIED = 0 + DEDICATED = 1 + PARTNER = 2 + PARTNER_PROVIDER = 3 + L2_DEDICATED = 4 + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + interconnect_uri: str = proto.Field( + proto.STRING, + number=3, + ) + region: str = proto.Field( + proto.STRING, + number=4, + ) + cloud_router_uri: str = proto.Field( + proto.STRING, + number=5, + ) + type_: Type = proto.Field( + proto.ENUM, + number=6, + enum=Type, + ) + l2_attachment_matched_ip_address: str = proto.Field( + proto.STRING, + number=7, + ) + + class EndpointInfo(proto.Message): r"""For display only. The specification of the endpoints for the test. EndpointInfo is derived from source and destination @@ -1780,6 +1984,9 @@ class DeliverInfo(proto.Message): psc_google_api_target (str): PSC Google API target the packet is delivered to (if applicable). + google_service_type (google.cloud.network_management_v1.types.DeliverInfo.GoogleServiceType): + Recognized type of a Google Service the + packet is delivered to (if applicable). """ class Target(proto.Enum): @@ -1852,6 +2059,43 @@ class Target(proto.Enum): REDIS_INSTANCE = 16 REDIS_CLUSTER = 17 + class GoogleServiceType(proto.Enum): + r"""Recognized type of a Google Service. + + Values: + GOOGLE_SERVICE_TYPE_UNSPECIFIED (0): + Unspecified Google Service. + IAP (1): + Identity aware proxy. + https://cloud.google.com/iap/docs/using-tcp-forwarding + GFE_PROXY_OR_HEALTH_CHECK_PROBER (2): + One of two services sharing IP ranges: + + - Load Balancer proxy + - Centralized Health Check prober + https://cloud.google.com/load-balancing/docs/firewall-rules + CLOUD_DNS (3): + Connectivity from Cloud DNS to forwarding + targets or alternate name servers that use + private routing. + https://cloud.google.com/dns/docs/zones/forwarding-zones#firewall-rules + https://cloud.google.com/dns/docs/policies#firewall-rules + PRIVATE_GOOGLE_ACCESS (4): + private.googleapis.com and + restricted.googleapis.com + SERVERLESS_VPC_ACCESS (5): + Google API via Private Service Connect. + https://cloud.google.com/vpc/docs/configure-private-service-connect-apis + Google API via Serverless VPC Access. + https://cloud.google.com/vpc/docs/serverless-vpc-access + """ + GOOGLE_SERVICE_TYPE_UNSPECIFIED = 0 + IAP = 1 + GFE_PROXY_OR_HEALTH_CHECK_PROBER = 2 + CLOUD_DNS = 3 + PRIVATE_GOOGLE_ACCESS = 4 + SERVERLESS_VPC_ACCESS = 5 + target: Target = proto.Field( proto.ENUM, number=1, @@ -1873,6 +2117,11 @@ class Target(proto.Enum): proto.STRING, number=5, ) + google_service_type: GoogleServiceType = proto.Field( + proto.ENUM, + number=6, + enum=GoogleServiceType, + ) class ForwardInfo(proto.Message): @@ -1916,6 +2165,8 @@ class Target(proto.Enum): Forwarded to an NCC Hub. ROUTER_APPLIANCE (9): Forwarded to a router appliance. + SECURE_WEB_PROXY_GATEWAY (10): + Forwarded to a Secure Web Proxy Gateway. """ TARGET_UNSPECIFIED = 0 PEERING_VPC = 1 @@ -1927,6 +2178,7 @@ class Target(proto.Enum): ANOTHER_PROJECT = 7 NCC_HUB = 8 ROUTER_APPLIANCE = 9 + SECURE_WEB_PROXY_GATEWAY = 10 target: Target = proto.Field( proto.ENUM, @@ -2072,9 +2324,13 @@ class Cause(proto.Enum): Aborted because expected route configuration was missing. GOOGLE_MANAGED_SERVICE_AMBIGUOUS_PSC_ENDPOINT (19): - Aborted because a PSC endpoint selection for + Aborted because PSC endpoint selection for the Google-managed service is ambiguous (several PSC endpoints satisfy test input). 
+ GOOGLE_MANAGED_SERVICE_AMBIGUOUS_ENDPOINT (39): + Aborted because endpoint selection for the + Google-managed service is ambiguous (several + endpoints satisfy test input). SOURCE_PSC_CLOUD_SQL_UNSUPPORTED (20): Aborted because tests with a PSC-based Cloud SQL instance as a source are not supported. @@ -2102,6 +2358,9 @@ class Cause(proto.Enum): Cloud Run revision with direct VPC access enabled, but there are no reserved serverless IP ranges. + IP_VERSION_PROTOCOL_MISMATCH (40): + Aborted because the used protocol is not + supported for the used IP version. """ CAUSE_UNSPECIFIED = 0 UNKNOWN_NETWORK = 1 @@ -2132,6 +2391,7 @@ class Cause(proto.Enum): FIREWALL_CONFIG_NOT_FOUND = 26 ROUTE_CONFIG_NOT_FOUND = 27 GOOGLE_MANAGED_SERVICE_AMBIGUOUS_PSC_ENDPOINT = 19 + GOOGLE_MANAGED_SERVICE_AMBIGUOUS_ENDPOINT = 39 SOURCE_PSC_CLOUD_SQL_UNSUPPORTED = 20 SOURCE_REDIS_CLUSTER_UNSUPPORTED = 34 SOURCE_REDIS_INSTANCE_UNSUPPORTED = 35 @@ -2140,6 +2400,7 @@ class Cause(proto.Enum): UNKNOWN_ISSUE_IN_GOOGLE_MANAGED_PROJECT = 30 UNSUPPORTED_GOOGLE_MANAGED_PROJECT_CONFIG = 31 NO_SERVERLESS_IP_RANGES = 37 + IP_VERSION_PROTOCOL_MISMATCH = 40 cause: Cause = proto.Field( proto.ENUM, @@ -2176,6 +2437,12 @@ class DropInfo(proto.Message): (if relevant). region (str): Region of the dropped packet (if relevant). + source_geolocation_code (str): + Geolocation (region code) of the source IP + address (if relevant). + destination_geolocation_code (str): + Geolocation (region code) of the destination + IP address (if relevant). """ class Cause(proto.Enum): @@ -2228,8 +2495,12 @@ class Cause(proto.Enum): invalid (it's not a forwarding rule of the internal passthrough load balancer). NO_ROUTE_FROM_INTERNET_TO_PRIVATE_IPV6_ADDRESS (44): - Packet is sent from the Internet to the - private IPv6 address. + Packet is sent from the Internet or Google + service to the private IPv6 address. + NO_ROUTE_FROM_EXTERNAL_IPV6_SOURCE_TO_PRIVATE_IPV6_ADDRESS (98): + Packet is sent from the external IPv6 source + address of an instance to the private IPv6 + address of an instance. VPN_TUNNEL_LOCAL_SELECTOR_MISMATCH (45): The packet does not match a policy-based VPN tunnel local selector. @@ -2240,20 +2511,19 @@ class Cause(proto.Enum): Packet with internal destination address sent to the internet gateway. PRIVATE_GOOGLE_ACCESS_DISALLOWED (8): - Instance with only an internal IP address + Endpoint with only an internal IP address tries to access Google API and services, but - private Google access is not enabled in the - subnet. + Private Google Access is not enabled in the + subnet or is not applicable. PRIVATE_GOOGLE_ACCESS_VIA_VPN_TUNNEL_UNSUPPORTED (47): Source endpoint tries to access Google API and services through the VPN tunnel to another network, but Private Google Access needs to be enabled in the source endpoint network. NO_EXTERNAL_ADDRESS (9): - Instance with only an internal IP address - tries to access external hosts, but Cloud NAT is - not enabled in the subnet, unless special - configurations on a VM allow this connection. + Endpoint with only an internal IP address + tries to access external hosts, but there is no + matching Cloud NAT gateway in the subnet. UNKNOWN_INTERNAL_ADDRESS (10): Destination internal address cannot be resolved to a known target. If this is a shared @@ -2516,6 +2786,46 @@ class Cause(proto.Enum): Packet with destination IP address within the reserved NAT64 range is dropped due to matching a route of an unsupported type. 
+ TRAFFIC_FROM_HYBRID_ENDPOINT_TO_INTERNET_DISALLOWED (89): + Packet could be dropped because hybrid + endpoint like a VPN gateway or Interconnect is + not allowed to send traffic to the Internet. + NO_MATCHING_NAT64_GATEWAY (90): + Packet with destination IP address within the + reserved NAT64 range is dropped due to no + matching NAT gateway in the subnet. + LOAD_BALANCER_BACKEND_IP_VERSION_MISMATCH (96): + Packet is dropped due to being sent to a + backend of a passthrough load balancer that + doesn't use the same IP version as the frontend. + NO_KNOWN_ROUTE_FROM_NCC_NETWORK_TO_DESTINATION (97): + Packet from the unknown NCC network is + dropped due to no known route from the source + network to the destination IP address. + CLOUD_NAT_PROTOCOL_UNSUPPORTED (99): + Packet is dropped by Cloud NAT due to using + an unsupported protocol. + L2_INTERCONNECT_UNSUPPORTED_PROTOCOL (100): + Packet is dropped due to using an unsupported + protocol (any other than UDP) for L2 + Interconnect. + L2_INTERCONNECT_UNSUPPORTED_PORT (101): + Packet is dropped due to using an unsupported + port (any other than 6081) for L2 Interconnect. + L2_INTERCONNECT_DESTINATION_IP_MISMATCH (102): + Packet is dropped due to destination IP not + matching the appliance mapping IPs configured on + the L2 Interconnect attachment. + NCC_ROUTE_WITHIN_HYBRID_SUBNET_UNSUPPORTED (104): + Packet could be dropped because it matches a + route associated with an NCC spoke in the hybrid + subnet context, but such a configuration is not + supported. + HYBRID_SUBNET_REGION_MISMATCH (105): + Packet is dropped because the region of the + hybrid subnet is different from the region of + the next hop of the route matched within this + hybrid subnet. """ CAUSE_UNSPECIFIED = 0 UNKNOWN_EXTERNAL_ADDRESS = 1 @@ -2532,6 +2842,7 @@ class Cause(proto.Enum): ROUTE_NEXT_HOP_VPN_TUNNEL_NOT_ESTABLISHED = 52 ROUTE_NEXT_HOP_FORWARDING_RULE_TYPE_INVALID = 53 NO_ROUTE_FROM_INTERNET_TO_PRIVATE_IPV6_ADDRESS = 44 + NO_ROUTE_FROM_EXTERNAL_IPV6_SOURCE_TO_PRIVATE_IPV6_ADDRESS = 98 VPN_TUNNEL_LOCAL_SELECTOR_MISMATCH = 45 VPN_TUNNEL_REMOTE_SELECTOR_MISMATCH = 46 PRIVATE_TRAFFIC_TO_INTERNET = 7 @@ -2605,6 +2916,16 @@ class Cause(proto.Enum): PSC_PORT_MAPPING_PORT_MISMATCH = 86 PSC_PORT_MAPPING_WITHOUT_PSC_CONNECTION_UNSUPPORTED = 87 UNSUPPORTED_ROUTE_MATCHED_FOR_NAT64_DESTINATION = 88 + TRAFFIC_FROM_HYBRID_ENDPOINT_TO_INTERNET_DISALLOWED = 89 + NO_MATCHING_NAT64_GATEWAY = 90 + LOAD_BALANCER_BACKEND_IP_VERSION_MISMATCH = 96 + NO_KNOWN_ROUTE_FROM_NCC_NETWORK_TO_DESTINATION = 97 + CLOUD_NAT_PROTOCOL_UNSUPPORTED = 99 + L2_INTERCONNECT_UNSUPPORTED_PROTOCOL = 100 + L2_INTERCONNECT_UNSUPPORTED_PORT = 101 + L2_INTERCONNECT_DESTINATION_IP_MISMATCH = 102 + NCC_ROUTE_WITHIN_HYBRID_SUBNET_UNSUPPORTED = 104 + HYBRID_SUBNET_REGION_MISMATCH = 105 cause: Cause = proto.Field( proto.ENUM, @@ -2627,6 +2948,14 @@ class Cause(proto.Enum): proto.STRING, number=5, ) + source_geolocation_code: str = proto.Field( + proto.STRING, + number=6, + ) + destination_geolocation_code: str = proto.Field( + proto.STRING, + number=7, + ) class GKEMasterInfo(proto.Message): @@ -3056,12 +3385,15 @@ class Type(proto.Enum): Cloud NAT Gateway. PRIVATE_SERVICE_CONNECT (4): Private service connect NAT. + GKE_POD_IP_MASQUERADING (5): + GKE Pod IP address masquerading. 
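The trace-related additions above (new step states, ``InstanceInfo.status``, ``DeliverInfo.google_service_type``, and the ``DropInfo`` geolocation codes) surface through the existing ``ReachabilityService`` connectivity-test API. The sketch below shows one way to read them from a finished test; it is illustrative only, assumes the standard ``projects/{project}/locations/global/connectivityTests/{test}`` resource name, and reuses the request shape shown in the generated samples later in this change.

```python
from google.cloud import network_management_v1


def print_trace_details(project_id: str, test_id: str) -> None:
    """Print step-level details of a connectivity test, including the new fields."""
    client = network_management_v1.ReachabilityServiceClient()

    request = network_management_v1.GetConnectivityTestRequest(
        # Assumed resource-name format for connectivity tests.
        name=f"projects/{project_id}/locations/global/connectivityTests/{test_id}",
    )
    test = client.get_connectivity_test(request=request)

    for trace in test.reachability_details.traces:
        for step in trace.steps:
            print(step.state)
            if "instance" in step:
                # `status` supersedes the deprecated `running` flag.
                print("  instance status:", step.instance.status)
            if "deliver" in step:
                # Recognized Google service type, when the packet is
                # delivered to a Google-managed service.
                print("  delivered to:", step.deliver.google_service_type)
            if "drop" in step:
                # Region codes of the source/destination IP addresses.
                print(
                    "  dropped:",
                    step.drop.source_geolocation_code,
                    step.drop.destination_geolocation_code,
                )
```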
""" TYPE_UNSPECIFIED = 0 INTERNAL_TO_EXTERNAL = 1 EXTERNAL_TO_INTERNAL = 2 CLOUD_NAT = 3 PRIVATE_SERVICE_CONNECT = 4 + GKE_POD_IP_MASQUERADING = 5 type_: Type = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs.py index 713b5b9ddf46..4b3746f0e842 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs.py @@ -33,6 +33,10 @@ "CreateVpcFlowLogsConfigRequest", "UpdateVpcFlowLogsConfigRequest", "DeleteVpcFlowLogsConfigRequest", + "QueryOrgVpcFlowLogsConfigsRequest", + "QueryOrgVpcFlowLogsConfigsResponse", + "ShowEffectiveFlowLogsConfigsRequest", + "ShowEffectiveFlowLogsConfigsResponse", }, ) @@ -42,8 +46,14 @@ class ListVpcFlowLogsConfigsRequest(proto.Message): Attributes: parent (str): - Required. The parent resource of the VpcFlowLogsConfig: - ``projects/{project_id}/locations/global`` + Required. The parent resource of the VpcFlowLogsConfig, in + one of the following formats: + + - For project-level resourcs: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` page_size (int): Optional. Number of ``VpcFlowLogsConfigs`` to return. page_token (str): @@ -120,9 +130,14 @@ class GetVpcFlowLogsConfigRequest(proto.Message): Attributes: name (str): - Required. ``VpcFlowLogsConfig`` resource name using the - form: - ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`` + Required. The resource name of the VpcFlowLogsConfig, in one + of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` """ name: str = proto.Field( @@ -136,9 +151,14 @@ class CreateVpcFlowLogsConfigRequest(proto.Message): Attributes: parent (str): - Required. The parent resource of the VPC Flow Logs - configuration to create: - ``projects/{project_id}/locations/global`` + Required. The parent resource of the VpcFlowLogsConfig to + create, in one of the following formats: + + - For project-level resources: + ``projects/{project_id}/locations/global`` + + - For organization-level resources: + ``organizations/{organization_id}/locations/global`` vpc_flow_logs_config_id (str): Required. ID of the ``VpcFlowLogsConfig``. vpc_flow_logs_config (google.cloud.network_management_v1.types.VpcFlowLogsConfig): @@ -165,8 +185,11 @@ class UpdateVpcFlowLogsConfigRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Mask of fields to update. At least - one path must be supplied in this field. + Required. Mask of fields to update. At least one path must + be supplied in this field. For example, to change the state + of the configuration to ENABLED, specify ``update_mask`` = + ``"state"``, and the ``vpc_flow_logs_config`` would be: + ``vpc_flow_logs_config = { name = "projects/my-project/locations/global/vpcFlowLogsConfigs/my-config" state = "ENABLED" }`` vpc_flow_logs_config (google.cloud.network_management_v1.types.VpcFlowLogsConfig): Required. Only fields specified in update_mask are updated. 
""" @@ -188,9 +211,14 @@ class DeleteVpcFlowLogsConfigRequest(proto.Message): Attributes: name (str): - Required. ``VpcFlowLogsConfig`` resource name using the - form: - ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config}`` + Required. The resource name of the VpcFlowLogsConfig, in one + of the following formats: + + - For a project-level resource: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For an organization-level resource: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` """ name: str = proto.Field( @@ -199,4 +227,163 @@ class DeleteVpcFlowLogsConfigRequest(proto.Message): ) +class QueryOrgVpcFlowLogsConfigsRequest(proto.Message): + r"""Request for the ``QueryOrgVpcFlowLogsConfigs`` method. + + Attributes: + parent (str): + Required. The parent resource of the VpcFlowLogsConfig, + specified in the following format: + ``projects/{project_id}/locations/global`` + page_size (int): + Optional. Number of ``VpcFlowLogsConfigs`` to return. + page_token (str): + Optional. Page token from an earlier query, as returned in + ``next_page_token``. + filter (str): + Optional. Lists the ``VpcFlowLogsConfigs`` that match the + filter expression. A filter expression must use the + supported [CEL logic operators] + (https://cloud.google.com/vpc/docs/about-flow-logs-records#supported_cel_logic_operators). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + + +class QueryOrgVpcFlowLogsConfigsResponse(proto.Message): + r"""Response for the ``QueryVpcFlowLogsConfigs`` method. + + Attributes: + vpc_flow_logs_configs (MutableSequence[google.cloud.network_management_v1.types.VpcFlowLogsConfig]): + List of VPC Flow Log configurations. + next_page_token (str): + Page token to fetch the next set of + configurations. + unreachable (MutableSequence[str]): + Locations that could not be reached (when querying all + locations with ``-``). + """ + + @property + def raw_page(self): + return self + + vpc_flow_logs_configs: MutableSequence[ + gcn_vpc_flow_logs_config.VpcFlowLogsConfig + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcn_vpc_flow_logs_config.VpcFlowLogsConfig, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ShowEffectiveFlowLogsConfigsRequest(proto.Message): + r"""Request for the ``ShowEffectiveFlowLogsConfigs`` method. + + Attributes: + parent (str): + Required. The parent resource of the VpcFlowLogsConfig, + specified in the following format: + ``projects/{project_id}/locations/global`` + resource (str): + Required. The resource to get the effective + VPC Flow Logs configuration for. The resource + must belong to the same project as the parent. + The resource must be a network, subnetwork, + interconnect attachment, VPN tunnel, or a + project. + page_size (int): + Optional. Number of ``EffectiveVpcFlowLogsConfigs`` to + return. Default is 30. + page_token (str): + Optional. Page token from an earlier query, as returned in + ``next_page_token``. + filter (str): + Optional. Lists the ``EffectiveVpcFlowLogsConfigs`` that + match the filter expression. 
A filter expression must use + the supported [CEL logic operators] + (https://cloud.google.com/vpc/docs/about-flow-logs-records#supported_cel_logic_operators). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + resource: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ShowEffectiveFlowLogsConfigsResponse(proto.Message): + r"""Response for the ``ShowEffectiveFlowLogsConfigs`` method. + + Attributes: + effective_flow_logs_configs (MutableSequence[google.cloud.network_management_v1.types.EffectiveVpcFlowLogsConfig]): + List of Effective Vpc Flow Logs + configurations. + next_page_token (str): + Page token to fetch the next set of + configurations. + unreachable (MutableSequence[str]): + Locations that could not be reached (when querying all + locations with ``-``). + """ + + @property + def raw_page(self): + return self + + effective_flow_logs_configs: MutableSequence[ + gcn_vpc_flow_logs_config.EffectiveVpcFlowLogsConfig + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gcn_vpc_flow_logs_config.EffectiveVpcFlowLogsConfig, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs_config.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs_config.py index c1f2da89ef74..9007f989019f 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs_config.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/vpc_flow_logs_config.py @@ -24,6 +24,7 @@ package="google.cloud.networkmanagement.v1", manifest={ "VpcFlowLogsConfig", + "EffectiveVpcFlowLogsConfig", }, ) @@ -40,8 +41,14 @@ class VpcFlowLogsConfig(proto.Message): Attributes: name (str): - Identifier. Unique name of the configuration using the form: - ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + Identifier. Unique name of the configuration. The name can + have one of the following forms: + + - For project-level configurations: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For organization-level configurations: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` description (str): Optional. The user-supplied description of the VPC Flow Logs configuration. Maximum of 512 @@ -52,7 +59,8 @@ class VpcFlowLogsConfig(proto.Message): Optional. The state of the VPC Flow Log configuration. Default value is ENABLED. When creating a new configuration, it must be - enabled. + enabled. Setting state=DISABLED will pause the + log generation for this config. This field is a member of `oneof`_ ``_state``. aggregation_interval (google.cloud.network_management_v1.types.VpcFlowLogsConfig.AggregationInterval): @@ -83,12 +91,31 @@ class VpcFlowLogsConfig(proto.Message): VPC Flow Logs should be logged. This field is a member of `oneof`_ ``_filter_expr``. + cross_project_metadata (google.cloud.network_management_v1.types.VpcFlowLogsConfig.CrossProjectMetadata): + Optional. 
Determines whether to include cross project + annotations in the logs. This field is available only for + organization configurations. If not specified in org configs + will be set to CROSS_PROJECT_METADATA_ENABLED. + + This field is a member of `oneof`_ ``_cross_project_metadata``. target_resource_state (google.cloud.network_management_v1.types.VpcFlowLogsConfig.TargetResourceState): - Output only. A diagnostic bit - describes the - state of the configured target resource for - diagnostic purposes. + Output only. Describes the state of the + configured target resource for diagnostic + purposes. This field is a member of `oneof`_ ``_target_resource_state``. + network (str): + Traffic will be logged from VMs, VPN tunnels and + Interconnect Attachments within the network. Format: + projects/{project_id}/global/networks/{name} + + This field is a member of `oneof`_ ``target_resource``. + subnet (str): + Traffic will be logged from VMs within the subnetwork. + Format: + projects/{project_id}/regions/{region}/subnetworks/{name} + + This field is a member of `oneof`_ ``target_resource``. interconnect_attachment (str): Traffic will be logged from the Interconnect Attachment. Format: @@ -111,8 +138,7 @@ class VpcFlowLogsConfig(proto.Message): class State(proto.Enum): r"""Determines whether this configuration will be generating - logs. Setting state=DISABLED will pause the log generation for - this config. + logs. Values: STATE_UNSPECIFIED (0): @@ -174,9 +200,29 @@ class Metadata(proto.Enum): EXCLUDE_ALL_METADATA = 2 CUSTOM_METADATA = 3 + class CrossProjectMetadata(proto.Enum): + r"""Determines whether to include cross project annotations in the logs. + Project configurations will always have + CROSS_PROJECT_METADATA_DISABLED. + + Values: + CROSS_PROJECT_METADATA_UNSPECIFIED (0): + If not specified, the default is + CROSS_PROJECT_METADATA_ENABLED. + CROSS_PROJECT_METADATA_ENABLED (1): + When CROSS_PROJECT_METADATA_ENABLED, metadata from other + projects will be included in the logs. + CROSS_PROJECT_METADATA_DISABLED (2): + When CROSS_PROJECT_METADATA_DISABLED, metadata from other + projects will not be included in the logs. + """ + CROSS_PROJECT_METADATA_UNSPECIFIED = 0 + CROSS_PROJECT_METADATA_ENABLED = 1 + CROSS_PROJECT_METADATA_DISABLED = 2 + class TargetResourceState(proto.Enum): - r"""Optional states of the target resource that are used as part - of the diagnostic bit. + r"""Output only. Indicates whether the target resource exists, + for diagnostic purposes. Values: TARGET_RESOURCE_STATE_UNSPECIFIED (0): @@ -232,12 +278,28 @@ class TargetResourceState(proto.Enum): number=8, optional=True, ) + cross_project_metadata: CrossProjectMetadata = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum=CrossProjectMetadata, + ) target_resource_state: TargetResourceState = proto.Field( proto.ENUM, number=12, optional=True, enum=TargetResourceState, ) + network: str = proto.Field( + proto.STRING, + number=100, + oneof="target_resource", + ) + subnet: str = proto.Field( + proto.STRING, + number=101, + oneof="target_resource", + ) interconnect_attachment: str = proto.Field( proto.STRING, number=102, @@ -265,4 +327,203 @@ class TargetResourceState(proto.Enum): ) +class EffectiveVpcFlowLogsConfig(proto.Message): + r"""A configuration to generate a response for + GetEffectiveVpcFlowLogsConfig request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Unique name of the configuration. The name can have one of + the following forms: + + - For project-level configurations: + ``projects/{project_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For organization-level configurations: + ``organizations/{organization_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}`` + + - For a Compute config, the name will be the path of the + subnet: + ``projects/{project_id}/regions/{region}/subnetworks/{subnet_id}`` + state (google.cloud.network_management_v1.types.VpcFlowLogsConfig.State): + The state of the VPC Flow Log configuration. + Default value is ENABLED. When creating a new + configuration, it must be enabled. Setting + state=DISABLED will pause the log generation for + this config. + + This field is a member of `oneof`_ ``_state``. + aggregation_interval (google.cloud.network_management_v1.types.VpcFlowLogsConfig.AggregationInterval): + The aggregation interval for the logs. Default value is + INTERVAL_5_SEC. + + This field is a member of `oneof`_ ``_aggregation_interval``. + flow_sampling (float): + The value of the field must be in (0, 1]. The sampling rate + of VPC Flow Logs where 1.0 means all collected logs are + reported. Setting the sampling rate to 0.0 is not allowed. + If you want to disable VPC Flow Logs, use the state field + instead. Default value is 1.0. + + This field is a member of `oneof`_ ``_flow_sampling``. + metadata (google.cloud.network_management_v1.types.VpcFlowLogsConfig.Metadata): + Configures whether all, none or a subset of metadata fields + should be added to the reported VPC flow logs. Default value + is INCLUDE_ALL_METADATA. + + This field is a member of `oneof`_ ``_metadata``. + metadata_fields (MutableSequence[str]): + Custom metadata fields to include in the reported VPC flow + logs. Can only be specified if "metadata" was set to + CUSTOM_METADATA. + filter_expr (str): + Export filter used to define which VPC Flow + Logs should be logged. + + This field is a member of `oneof`_ ``_filter_expr``. + cross_project_metadata (google.cloud.network_management_v1.types.VpcFlowLogsConfig.CrossProjectMetadata): + Determines whether to include cross project annotations in + the logs. This field is available only for organization + configurations. If not specified in org configs will be set + to CROSS_PROJECT_METADATA_ENABLED. + + This field is a member of `oneof`_ ``_cross_project_metadata``. + network (str): + Traffic will be logged from VMs, VPN tunnels and + Interconnect Attachments within the network. Format: + projects/{project_id}/global/networks/{name} + + This field is a member of `oneof`_ ``target_resource``. + subnet (str): + Traffic will be logged from VMs within the subnetwork. + Format: + projects/{project_id}/regions/{region}/subnetworks/{name} + + This field is a member of `oneof`_ ``target_resource``. + interconnect_attachment (str): + Traffic will be logged from the Interconnect Attachment. + Format: + projects/{project_id}/regions/{region}/interconnectAttachments/{name} + + This field is a member of `oneof`_ ``target_resource``. + vpn_tunnel (str): + Traffic will be logged from the VPN Tunnel. Format: + projects/{project_id}/regions/{region}/vpnTunnels/{name} + + This field is a member of `oneof`_ ``target_resource``. 
+ scope (google.cloud.network_management_v1.types.EffectiveVpcFlowLogsConfig.Scope): + Specifies the scope of the config (e.g., + SUBNET, NETWORK, ORGANIZATION..). + + This field is a member of `oneof`_ ``_scope``. + """ + + class Scope(proto.Enum): + r"""The scope for this flow log configuration. + + Values: + SCOPE_UNSPECIFIED (0): + Scope is unspecified. + SUBNET (1): + Target resource is a subnet (Network + Management API). + COMPUTE_API_SUBNET (2): + Target resource is a subnet, and the config + originates from the Compute API. + NETWORK (3): + Target resource is a network. + VPN_TUNNEL (4): + Target resource is a VPN tunnel. + INTERCONNECT_ATTACHMENT (5): + Target resource is an interconnect + attachment. + ORGANIZATION (6): + Configuration applies to an entire + organization. + """ + SCOPE_UNSPECIFIED = 0 + SUBNET = 1 + COMPUTE_API_SUBNET = 2 + NETWORK = 3 + VPN_TUNNEL = 4 + INTERCONNECT_ATTACHMENT = 5 + ORGANIZATION = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + state: "VpcFlowLogsConfig.State" = proto.Field( + proto.ENUM, + number=3, + optional=True, + enum="VpcFlowLogsConfig.State", + ) + aggregation_interval: "VpcFlowLogsConfig.AggregationInterval" = proto.Field( + proto.ENUM, + number=4, + optional=True, + enum="VpcFlowLogsConfig.AggregationInterval", + ) + flow_sampling: float = proto.Field( + proto.FLOAT, + number=5, + optional=True, + ) + metadata: "VpcFlowLogsConfig.Metadata" = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum="VpcFlowLogsConfig.Metadata", + ) + metadata_fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + filter_expr: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + cross_project_metadata: "VpcFlowLogsConfig.CrossProjectMetadata" = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum="VpcFlowLogsConfig.CrossProjectMetadata", + ) + network: str = proto.Field( + proto.STRING, + number=100, + oneof="target_resource", + ) + subnet: str = proto.Field( + proto.STRING, + number=101, + oneof="target_resource", + ) + interconnect_attachment: str = proto.Field( + proto.STRING, + number=102, + oneof="target_resource", + ) + vpn_tunnel: str = proto.Field( + proto.STRING, + number=103, + oneof="target_resource", + ) + scope: Scope = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum=Scope, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-network-management/noxfile.py b/packages/google-cloud-network-management/noxfile.py index 0279b0a48555..214697943dbf 100644 --- a/packages/google-cloud-network-management/noxfile.py +++ b/packages/google-cloud-network-management/noxfile.py @@ -27,6 +27,10 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py new file mode 100644 index 000000000000..5bd2d595cfb2 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_CreateVpcFlowLogsConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +async def sample_create_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.CreateVpcFlowLogsConfigRequest( + parent="parent_value", + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.create_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_CreateVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py new file mode 100644 index 000000000000..739c2e08a985 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_CreateVpcFlowLogsConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +def sample_create_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.CreateVpcFlowLogsConfigRequest( + parent="parent_value", + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.create_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_CreateVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py new file mode 100644 index 000000000000..0e243f099714 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_DeleteVpcFlowLogsConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +async def sample_delete_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.DeleteVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_DeleteVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py new file mode 100644 index 000000000000..d4c5855f3974 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_DeleteVpcFlowLogsConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +def sample_delete_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.DeleteVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_DeleteVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py new file mode 100644 index 000000000000..15da3a4843a4 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_GetVpcFlowLogsConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +async def sample_get_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.GetVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_vpc_flow_logs_config(request=request) + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_GetVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py new file mode 100644 index 000000000000..42875432e339 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_GetVpcFlowLogsConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +def sample_get_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.GetVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_vpc_flow_logs_config(request=request) + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_GetVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py new file mode 100644 index 000000000000..ea41933f7ef7 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVpcFlowLogsConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_ListVpcFlowLogsConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +async def sample_list_vpc_flow_logs_configs(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.ListVpcFlowLogsConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vpc_flow_logs_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_ListVpcFlowLogsConfigs_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py new file mode 100644 index 000000000000..daba6cb9e886 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListVpcFlowLogsConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_ListVpcFlowLogsConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +def sample_list_vpc_flow_logs_configs(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.ListVpcFlowLogsConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_vpc_flow_logs_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_ListVpcFlowLogsConfigs_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py new file mode 100644 index 000000000000..255a507ca10b --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_UpdateVpcFlowLogsConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +async def sample_update_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.UpdateVpcFlowLogsConfigRequest( + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.update_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_UpdateVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py new file mode 100644 index 000000000000..1fb03bf14ecb --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateVpcFlowLogsConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_OrganizationVpcFlowLogsService_UpdateVpcFlowLogsConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +def sample_update_vpc_flow_logs_config(): + # Create a client + client = network_management_v1.OrganizationVpcFlowLogsServiceClient() + + # Initialize request argument(s) + vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() + vpc_flow_logs_config.network = "network_value" + + request = network_management_v1.UpdateVpcFlowLogsConfigRequest( + vpc_flow_logs_config=vpc_flow_logs_config, + ) + + # Make the request + operation = client.update_vpc_flow_logs_config(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networkmanagement_v1_generated_OrganizationVpcFlowLogsService_UpdateVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py index 4df7bf889bf5..afddac96e32d 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py @@ -54,4 +54,5 @@ async def sample_create_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_CreateConnectivityTest_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py index c3c1865cae19..383ec0c923e6 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py @@ -54,4 +54,5 @@ def sample_create_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_CreateConnectivityTest_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py index 425bf5a2b4bb..fec1cb24e740 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py @@ -53,4 +53,5 @@ async def sample_delete_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_DeleteConnectivityTest_async] 
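The UpdateVpcFlowLogsConfig samples above wait on operation.result() without a bound, and their headers note that a regional endpoint may need to be supplied through client_options. A minimal sketch combining both, assuming a placeholder api_endpoint value and an arbitrary 300-second timeout (neither appears in this diff):

# Hedged sketch, not generated code: adds a timeout and basic error handling
# around the long-running UpdateVpcFlowLogsConfig call shown above.
from google.api_core import exceptions as gapi_exceptions
from google.api_core.client_options import ClientOptions
from google.cloud import network_management_v1


def update_vpc_flow_logs_config_with_timeout():
    # The generated header comments point at client_options for regional
    # endpoints; the endpoint string here is only a placeholder.
    client = network_management_v1.OrganizationVpcFlowLogsServiceClient(
        client_options=ClientOptions(api_endpoint="networkmanagement.googleapis.com")
    )

    vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig()
    vpc_flow_logs_config.network = "network_value"

    request = network_management_v1.UpdateVpcFlowLogsConfigRequest(
        vpc_flow_logs_config=vpc_flow_logs_config,
    )

    operation = client.update_vpc_flow_logs_config(request=request)
    try:
        # result() accepts a timeout in seconds; 300 is an arbitrary choice.
        print(operation.result(timeout=300))
    except gapi_exceptions.GoogleAPICallError as exc:
        # Surfaces API-side failures (permissions, invalid arguments, ...).
        print(f"UpdateVpcFlowLogsConfig failed: {exc}")

The snippet metadata further down in this diff also lists flattened vpc_flow_logs_config and update_mask parameters for this method, so the same call can be made without building the request object explicitly.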
diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py index c72b2a506e39..8823d9be5a81 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py @@ -53,4 +53,5 @@ def sample_delete_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_DeleteConnectivityTest_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py index 9ec825051d76..ec4ce6f7950d 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py @@ -49,4 +49,5 @@ async def sample_get_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_GetConnectivityTest_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py index 2c28e17d7f58..a6ae687d17c3 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py @@ -49,4 +49,5 @@ def sample_get_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_GetConnectivityTest_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py index 513f7d442f2b..e6ec98b6795d 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py @@ -50,4 +50,5 @@ async def sample_list_connectivity_tests(): async for response in page_result: print(response) + # [END networkmanagement_v1_generated_ReachabilityService_ListConnectivityTests_async] diff --git 
a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py index ffb070008ca6..c4bc2b597343 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py @@ -50,4 +50,5 @@ def sample_list_connectivity_tests(): for response in page_result: print(response) + # [END networkmanagement_v1_generated_ReachabilityService_ListConnectivityTests_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py index cc3760803a14..055b89f6050e 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py @@ -53,4 +53,5 @@ async def sample_rerun_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_RerunConnectivityTest_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py index 5728379fa7fe..20d0b952a213 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py @@ -53,4 +53,5 @@ def sample_rerun_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_RerunConnectivityTest_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py index 95cedd225a4f..3d01ec601a9c 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py @@ -39,8 +39,7 @@ async def sample_update_connectivity_test(): client = network_management_v1.ReachabilityServiceAsyncClient() # Initialize request argument(s) - request = network_management_v1.UpdateConnectivityTestRequest( - ) + request = network_management_v1.UpdateConnectivityTestRequest() # Make the 
request operation = client.update_connectivity_test(request=request) @@ -52,4 +51,5 @@ async def sample_update_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_UpdateConnectivityTest_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py index b579e7028b14..b1079ecd4f31 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py @@ -39,8 +39,7 @@ def sample_update_connectivity_test(): client = network_management_v1.ReachabilityServiceClient() # Initialize request argument(s) - request = network_management_v1.UpdateConnectivityTestRequest( - ) + request = network_management_v1.UpdateConnectivityTestRequest() # Make the request operation = client.update_connectivity_test(request=request) @@ -52,4 +51,5 @@ def sample_update_connectivity_test(): # Handle the response print(response) + # [END networkmanagement_v1_generated_ReachabilityService_UpdateConnectivityTest_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py index cf691be8f586..05939cb8f1cd 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py @@ -40,7 +40,7 @@ async def sample_create_vpc_flow_logs_config(): # Initialize request argument(s) vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.CreateVpcFlowLogsConfigRequest( parent="parent_value", @@ -58,4 +58,5 @@ async def sample_create_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_CreateVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py index cc83f8b107bf..af643cea112e 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py @@ -40,7 +40,7 @@ def sample_create_vpc_flow_logs_config(): # Initialize request argument(s) 
vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.CreateVpcFlowLogsConfigRequest( parent="parent_value", @@ -58,4 +58,5 @@ def sample_create_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_CreateVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py index b6a508635a6b..cd9397ca0627 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py @@ -53,4 +53,5 @@ async def sample_delete_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_DeleteVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py index b0316ae971ed..067976d22370 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py @@ -53,4 +53,5 @@ def sample_delete_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_DeleteVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py index a81219377c67..4ba45a0450d2 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py @@ -49,4 +49,5 @@ async def sample_get_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_GetVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py index d1f2b918e67b..be4fae89e683 100644 --- 
a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py @@ -49,4 +49,5 @@ def sample_get_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_GetVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py index 51b5e58ea9a0..f1e652241e5d 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py @@ -50,4 +50,5 @@ async def sample_list_vpc_flow_logs_configs(): async for response in page_result: print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_ListVpcFlowLogsConfigs_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py index 61a8c08ab54b..3d174f08b2f0 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py @@ -50,4 +50,5 @@ def sample_list_vpc_flow_logs_configs(): for response in page_result: print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_ListVpcFlowLogsConfigs_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_async.py new file mode 100644 index 000000000000..5dd464df01d2 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for QueryOrgVpcFlowLogsConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_VpcFlowLogsService_QueryOrgVpcFlowLogsConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +async def sample_query_org_vpc_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.QueryOrgVpcFlowLogsConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.query_org_vpc_flow_logs_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networkmanagement_v1_generated_VpcFlowLogsService_QueryOrgVpcFlowLogsConfigs_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_sync.py new file mode 100644 index 000000000000..483c28ada765 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryOrgVpcFlowLogsConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_VpcFlowLogsService_QueryOrgVpcFlowLogsConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +def sample_query_org_vpc_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.QueryOrgVpcFlowLogsConfigsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.query_org_vpc_flow_logs_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networkmanagement_v1_generated_VpcFlowLogsService_QueryOrgVpcFlowLogsConfigs_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_async.py new file mode 100644 index 000000000000..2ac3ab255724 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ShowEffectiveFlowLogsConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_VpcFlowLogsService_ShowEffectiveFlowLogsConfigs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +async def sample_show_effective_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceAsyncClient() + + # Initialize request argument(s) + request = network_management_v1.ShowEffectiveFlowLogsConfigsRequest( + parent="parent_value", + resource="resource_value", + ) + + # Make the request + page_result = client.show_effective_flow_logs_configs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networkmanagement_v1_generated_VpcFlowLogsService_ShowEffectiveFlowLogsConfigs_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_sync.py new file mode 100644 index 000000000000..4bd82ed5c0d9 --- /dev/null +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ShowEffectiveFlowLogsConfigs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-management + + +# [START networkmanagement_v1_generated_VpcFlowLogsService_ShowEffectiveFlowLogsConfigs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
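The new QueryOrgVpcFlowLogsConfigs and ShowEffectiveFlowLogsConfigs async samples above define coroutines but never show how to execute them. A minimal sketch of one way to drive them from a script, assuming the sample functions are defined in, or imported into, the same module:

# Hedged sketch, not generated code: runs the async samples defined above.
import asyncio


async def main():
    # Run the two coroutines back to back; asyncio.gather(...) could be used
    # instead if concurrent execution is preferred.
    await sample_query_org_vpc_flow_logs_configs()
    await sample_show_effective_flow_logs_configs()


if __name__ == "__main__":
    asyncio.run(main())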
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_management_v1 + + +def sample_show_effective_flow_logs_configs(): + # Create a client + client = network_management_v1.VpcFlowLogsServiceClient() + + # Initialize request argument(s) + request = network_management_v1.ShowEffectiveFlowLogsConfigsRequest( + parent="parent_value", + resource="resource_value", + ) + + # Make the request + page_result = client.show_effective_flow_logs_configs(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networkmanagement_v1_generated_VpcFlowLogsService_ShowEffectiveFlowLogsConfigs_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py index b20bb4028578..5f83e2942c2a 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py @@ -40,7 +40,7 @@ async def sample_update_vpc_flow_logs_config(): # Initialize request argument(s) vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.UpdateVpcFlowLogsConfigRequest( vpc_flow_logs_config=vpc_flow_logs_config, @@ -56,4 +56,5 @@ async def sample_update_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_UpdateVpcFlowLogsConfig_async] diff --git a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py index 329a148e1e20..8fe0ef4337b1 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py +++ b/packages/google-cloud-network-management/samples/generated_samples/networkmanagement_v1_generated_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py @@ -40,7 +40,7 @@ def sample_update_vpc_flow_logs_config(): # Initialize request argument(s) vpc_flow_logs_config = network_management_v1.VpcFlowLogsConfig() - vpc_flow_logs_config.interconnect_attachment = "interconnect_attachment_value" + vpc_flow_logs_config.network = "network_value" request = network_management_v1.UpdateVpcFlowLogsConfigRequest( vpc_flow_logs_config=vpc_flow_logs_config, @@ -56,4 +56,5 @@ def sample_update_vpc_flow_logs_config(): # Handle the response print(response) + # [END networkmanagement_v1_generated_VpcFlowLogsService_UpdateVpcFlowLogsConfig_sync] diff --git a/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json 
b/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json index 3adbef3413c1..22b88eb7162f 100644 --- a/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json +++ b/packages/google-cloud-network-management/samples/generated_samples/snippet_metadata_google.cloud.networkmanagement.v1.json @@ -16,34 +16,34 @@ "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", - "shortName": "ReachabilityServiceAsyncClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient", + "shortName": "OrganizationVpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.create_connectivity_test", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient.create_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.CreateConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.CreateVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "CreateConnectivityTest" + "shortName": "CreateVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.CreateConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest" }, { "name": "parent", "type": "str" }, { - "name": "test_id", - "type": "str" + "name": "vpc_flow_logs_config", + "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" }, { - "name": "resource", - "type": "google.cloud.network_management_v1.types.ConnectivityTest" + "name": "vpc_flow_logs_config_id", + "type": "str" }, { "name": "retry", @@ -59,21 +59,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_connectivity_test" + "shortName": "create_vpc_flow_logs_config" }, - "description": "Sample for CreateConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py", + "description": "Sample for CreateVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_CreateConnectivityTest_async", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_CreateVpcFlowLogsConfig_async", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -83,55 +83,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py" }, { "canonical": 
true, "clientMethod": { "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", - "shortName": "ReachabilityServiceClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient", + "shortName": "OrganizationVpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.create_connectivity_test", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient.create_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.CreateConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.CreateVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "CreateConnectivityTest" + "shortName": "CreateVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.CreateConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest" }, { "name": "parent", "type": "str" }, { - "name": "test_id", - "type": "str" + "name": "vpc_flow_logs_config", + "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" }, { - "name": "resource", - "type": "google.cloud.network_management_v1.types.ConnectivityTest" + "name": "vpc_flow_logs_config_id", + "type": "str" }, { "name": "retry", @@ -147,21 +147,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_connectivity_test" + "shortName": "create_vpc_flow_logs_config" }, - "description": "Sample for CreateConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py", + "description": "Sample for CreateVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_CreateConnectivityTest_sync", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_CreateVpcFlowLogsConfig_sync", "segments": [ { - "end": 56, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 60, "start": 27, "type": "SHORT" }, @@ -171,44 +171,44 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", - "shortName": "ReachabilityServiceAsyncClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient", + "shortName": "OrganizationVpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.delete_connectivity_test", + "fullName": 
"google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient.delete_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.DeleteConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.DeleteVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "DeleteConnectivityTest" + "shortName": "DeleteVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.DeleteConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest" }, { "name": "name", @@ -228,13 +228,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_connectivity_test" + "shortName": "delete_vpc_flow_logs_config" }, - "description": "Sample for DeleteConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py", + "description": "Sample for DeleteVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_DeleteConnectivityTest_async", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_DeleteVpcFlowLogsConfig_async", "segments": [ { "end": 55, @@ -267,28 +267,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", - "shortName": "ReachabilityServiceClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient", + "shortName": "OrganizationVpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.delete_connectivity_test", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient.delete_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.DeleteConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.DeleteVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "DeleteConnectivityTest" + "shortName": "DeleteVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.DeleteConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest" }, { "name": "name", @@ -308,13 +308,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_connectivity_test" + "shortName": "delete_vpc_flow_logs_config" }, - "description": "Sample for DeleteConnectivityTest", - "file": 
"networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py", + "description": "Sample for DeleteVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_DeleteConnectivityTest_sync", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_DeleteVpcFlowLogsConfig_sync", "segments": [ { "end": 55, @@ -347,29 +347,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", - "shortName": "ReachabilityServiceAsyncClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient", + "shortName": "OrganizationVpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.get_connectivity_test", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient.get_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.GetConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.GetVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "GetConnectivityTest" + "shortName": "GetVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.GetConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest" }, { "name": "name", @@ -388,14 +388,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.types.ConnectivityTest", - "shortName": "get_connectivity_test" + "resultType": "google.cloud.network_management_v1.types.VpcFlowLogsConfig", + "shortName": "get_vpc_flow_logs_config" }, - "description": "Sample for GetConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py", + "description": "Sample for GetVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_GetConnectivityTest_async", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_GetVpcFlowLogsConfig_async", "segments": [ { "end": 51, @@ -428,28 +428,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", - "shortName": "ReachabilityServiceClient" + "fullName": 
"google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient", + "shortName": "OrganizationVpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.get_connectivity_test", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient.get_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.GetConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.GetVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "GetConnectivityTest" + "shortName": "GetVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.GetConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest" }, { "name": "name", @@ -468,14 +468,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.types.ConnectivityTest", - "shortName": "get_connectivity_test" + "resultType": "google.cloud.network_management_v1.types.VpcFlowLogsConfig", + "shortName": "get_vpc_flow_logs_config" }, - "description": "Sample for GetConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py", + "description": "Sample for GetVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_GetConnectivityTest_sync", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_GetVpcFlowLogsConfig_sync", "segments": [ { "end": 51, @@ -508,29 +508,29 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", - "shortName": "ReachabilityServiceAsyncClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient", + "shortName": "OrganizationVpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.list_connectivity_tests", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient.list_vpc_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.ListConnectivityTests", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.ListVpcFlowLogsConfigs", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "ListConnectivityTests" + "shortName": "ListVpcFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.ListConnectivityTestsRequest" + "type": 
"google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest" }, { "name": "parent", @@ -549,14 +549,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.services.reachability_service.pagers.ListConnectivityTestsAsyncPager", - "shortName": "list_connectivity_tests" + "resultType": "google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsAsyncPager", + "shortName": "list_vpc_flow_logs_configs" }, - "description": "Sample for ListConnectivityTests", - "file": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py", + "description": "Sample for ListVpcFlowLogsConfigs", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_ListConnectivityTests_async", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_ListVpcFlowLogsConfigs_async", "segments": [ { "end": 52, @@ -589,28 +589,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", - "shortName": "ReachabilityServiceClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient", + "shortName": "OrganizationVpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.list_connectivity_tests", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient.list_vpc_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.ListConnectivityTests", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.ListVpcFlowLogsConfigs", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "ListConnectivityTests" + "shortName": "ListVpcFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.ListConnectivityTestsRequest" + "type": "google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest" }, { "name": "parent", @@ -629,14 +629,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.services.reachability_service.pagers.ListConnectivityTestsPager", - "shortName": "list_connectivity_tests" + "resultType": "google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsPager", + "shortName": "list_vpc_flow_logs_configs" }, - "description": "Sample for ListConnectivityTests", - "file": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py", + "description": "Sample for ListVpcFlowLogsConfigs", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"networkmanagement_v1_generated_ReachabilityService_ListConnectivityTests_sync", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_ListVpcFlowLogsConfigs_sync", "segments": [ { "end": 52, @@ -669,29 +669,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py" }, { "canonical": true, "clientMethod": { "async": true, "client": { - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", - "shortName": "ReachabilityServiceAsyncClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient", + "shortName": "OrganizationVpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.rerun_connectivity_test", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceAsyncClient.update_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.RerunConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.UpdateVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "RerunConnectivityTest" + "shortName": "UpdateVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.RerunConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.UpdateVpcFlowLogsConfigRequest" + }, + { + "name": "vpc_flow_logs_config", + "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -707,21 +715,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "rerun_connectivity_test" + "shortName": "update_vpc_flow_logs_config" }, - "description": "Sample for RerunConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py", + "description": "Sample for UpdateVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_RerunConnectivityTest_async", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_UpdateVpcFlowLogsConfig_async", "segments": [ { - "end": 55, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 58, "start": 27, "type": "SHORT" }, @@ -731,43 +739,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_async.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.network_management_v1.ReachabilityServiceClient", - "shortName": "ReachabilityServiceClient" + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient", + "shortName": "OrganizationVpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.rerun_connectivity_test", + "fullName": "google.cloud.network_management_v1.OrganizationVpcFlowLogsServiceClient.update_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.RerunConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService.UpdateVpcFlowLogsConfig", "service": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", - "shortName": "ReachabilityService" + "fullName": "google.cloud.networkmanagement.v1.OrganizationVpcFlowLogsService", + "shortName": "OrganizationVpcFlowLogsService" }, - "shortName": "RerunConnectivityTest" + "shortName": "UpdateVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.RerunConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.UpdateVpcFlowLogsConfigRequest" + }, + { + "name": "vpc_flow_logs_config", + "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" }, { "name": "retry", @@ -783,21 +799,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "rerun_connectivity_test" + "shortName": "update_vpc_flow_logs_config" }, - "description": "Sample for RerunConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py", + "description": "Sample for UpdateVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_RerunConnectivityTest_sync", + "regionTag": "networkmanagement_v1_generated_OrganizationVpcFlowLogsService_UpdateVpcFlowLogsConfig_sync", "segments": [ { - "end": 55, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 58, "start": 27, "type": "SHORT" }, @@ -807,22 +823,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 55, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py" + "title": "networkmanagement_v1_generated_organization_vpc_flow_logs_service_update_vpc_flow_logs_config_sync.py" }, { "canonical": true, @@ -832,23 +848,27 @@ "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", "shortName": "ReachabilityServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.update_connectivity_test", + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.create_connectivity_test", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.UpdateConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.CreateConnectivityTest", "service": { "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", "shortName": 
"ReachabilityService" }, - "shortName": "UpdateConnectivityTest" + "shortName": "CreateConnectivityTest" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.UpdateConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.CreateConnectivityTestRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "parent", + "type": "str" + }, + { + "name": "test_id", + "type": "str" }, { "name": "resource", @@ -868,21 +888,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_connectivity_test" + "shortName": "create_connectivity_test" }, - "description": "Sample for UpdateConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py", + "description": "Sample for CreateConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_UpdateConnectivityTest_async", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_CreateConnectivityTest_async", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -892,22 +912,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py" + "title": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_async.py" }, { "canonical": true, @@ -916,23 +936,27 @@ "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", "shortName": "ReachabilityServiceClient" }, - "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.update_connectivity_test", + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.create_connectivity_test", "method": { - "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.UpdateConnectivityTest", + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.CreateConnectivityTest", "service": { "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", "shortName": "ReachabilityService" }, - "shortName": "UpdateConnectivityTest" + "shortName": "CreateConnectivityTest" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.UpdateConnectivityTestRequest" + "type": "google.cloud.network_management_v1.types.CreateConnectivityTestRequest" }, { - "name": "update_mask", - "type": "google.protobuf.field_mask_pb2.FieldMask" + "name": "parent", + "type": "str" + }, + { + "name": "test_id", + "type": "str" }, { "name": "resource", @@ -952,21 +976,1164 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "update_connectivity_test" + "shortName": "create_connectivity_test" }, - "description": "Sample for UpdateConnectivityTest", - "file": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py", + "description": "Sample for CreateConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_ReachabilityService_UpdateConnectivityTest_sync", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_CreateConnectivityTest_sync", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_create_connectivity_test_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", + "shortName": "ReachabilityServiceAsyncClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.delete_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.DeleteConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "DeleteConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.DeleteConnectivityTestRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connectivity_test" + }, + "description": "Sample for DeleteConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_DeleteConnectivityTest_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_delete_connectivity_test_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", + "shortName": "ReachabilityServiceClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.delete_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.DeleteConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "DeleteConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.DeleteConnectivityTestRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connectivity_test" + }, + "description": "Sample for DeleteConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_DeleteConnectivityTest_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_delete_connectivity_test_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", + "shortName": "ReachabilityServiceAsyncClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.get_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.GetConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "GetConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.GetConnectivityTestRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_management_v1.types.ConnectivityTest", + "shortName": "get_connectivity_test" + }, + "description": "Sample for GetConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_GetConnectivityTest_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", + "shortName": "ReachabilityServiceClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.get_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.GetConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "GetConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.GetConnectivityTestRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_management_v1.types.ConnectivityTest", + "shortName": "get_connectivity_test" + }, + "description": "Sample for GetConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_GetConnectivityTest_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_get_connectivity_test_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", + "shortName": "ReachabilityServiceAsyncClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.list_connectivity_tests", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.ListConnectivityTests", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "ListConnectivityTests" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.ListConnectivityTestsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_management_v1.services.reachability_service.pagers.ListConnectivityTestsAsyncPager", + "shortName": "list_connectivity_tests" + }, + "description": "Sample for ListConnectivityTests", + "file": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_ListConnectivityTests_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", + "shortName": "ReachabilityServiceClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.list_connectivity_tests", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.ListConnectivityTests", + "service": { + "fullName": 
"google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "ListConnectivityTests" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.ListConnectivityTestsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_management_v1.services.reachability_service.pagers.ListConnectivityTestsPager", + "shortName": "list_connectivity_tests" + }, + "description": "Sample for ListConnectivityTests", + "file": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_ListConnectivityTests_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_list_connectivity_tests_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", + "shortName": "ReachabilityServiceAsyncClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.rerun_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.RerunConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "RerunConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.RerunConnectivityTestRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "rerun_connectivity_test" + }, + "description": "Sample for RerunConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_RerunConnectivityTest_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", + "shortName": "ReachabilityServiceClient" + }, + "fullName": 
"google.cloud.network_management_v1.ReachabilityServiceClient.rerun_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.RerunConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "RerunConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.RerunConnectivityTestRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "rerun_connectivity_test" + }, + "description": "Sample for RerunConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_RerunConnectivityTest_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_rerun_connectivity_test_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient", + "shortName": "ReachabilityServiceAsyncClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceAsyncClient.update_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.UpdateConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "UpdateConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.UpdateConnectivityTestRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "resource", + "type": "google.cloud.network_management_v1.types.ConnectivityTest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_connectivity_test" + }, + "description": "Sample for UpdateConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_UpdateConnectivityTest_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + 
], + "title": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient", + "shortName": "ReachabilityServiceClient" + }, + "fullName": "google.cloud.network_management_v1.ReachabilityServiceClient.update_connectivity_test", + "method": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService.UpdateConnectivityTest", + "service": { + "fullName": "google.cloud.networkmanagement.v1.ReachabilityService", + "shortName": "ReachabilityService" + }, + "shortName": "UpdateConnectivityTest" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.UpdateConnectivityTestRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "resource", + "type": "google.cloud.network_management_v1.types.ConnectivityTest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_connectivity_test" + }, + "description": "Sample for UpdateConnectivityTest", + "file": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_ReachabilityService_UpdateConnectivityTest_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient", + "shortName": "VpcFlowLogsServiceAsyncClient" + }, + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.create_vpc_flow_logs_config", + "method": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.CreateVpcFlowLogsConfig", + "service": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "shortName": "VpcFlowLogsService" + }, + "shortName": "CreateVpcFlowLogsConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "vpc_flow_logs_config", + "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" + }, + { + "name": "vpc_flow_logs_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_vpc_flow_logs_config" + }, + "description": "Sample for CreateVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_CreateVpcFlowLogsConfig_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient", + "shortName": "VpcFlowLogsServiceClient" + }, + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.create_vpc_flow_logs_config", + "method": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.CreateVpcFlowLogsConfig", + "service": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "shortName": "VpcFlowLogsService" + }, + "shortName": "CreateVpcFlowLogsConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "vpc_flow_logs_config", + "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" + }, + { + "name": "vpc_flow_logs_config_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_vpc_flow_logs_config" + }, + "description": "Sample for CreateVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_CreateVpcFlowLogsConfig_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient", + "shortName": "VpcFlowLogsServiceAsyncClient" + }, + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.delete_vpc_flow_logs_config", + "method": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.DeleteVpcFlowLogsConfig", + "service": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "shortName": "VpcFlowLogsService" + }, + "shortName": "DeleteVpcFlowLogsConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + 
}, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_vpc_flow_logs_config" + }, + "description": "Sample for DeleteVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_DeleteVpcFlowLogsConfig_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient", + "shortName": "VpcFlowLogsServiceClient" + }, + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.delete_vpc_flow_logs_config", + "method": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.DeleteVpcFlowLogsConfig", + "service": { + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", + "shortName": "VpcFlowLogsService" + }, + "shortName": "DeleteVpcFlowLogsConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_vpc_flow_logs_config" + }, + "description": "Sample for DeleteVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_DeleteVpcFlowLogsConfig_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, "start": 27, "type": "SHORT" }, @@ -976,22 +2143,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_reachability_service_update_connectivity_test_sync.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py" }, { "canonical": true, @@ -1001,30 +2168,22 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient", "shortName": "VpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.create_vpc_flow_logs_config", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.get_vpc_flow_logs_config", "method": { - "fullName": 
"google.cloud.networkmanagement.v1.VpcFlowLogsService.CreateVpcFlowLogsConfig", + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.GetVpcFlowLogsConfig", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "CreateVpcFlowLogsConfig" + "shortName": "GetVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "vpc_flow_logs_config", - "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" + "type": "google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest" }, { - "name": "vpc_flow_logs_config_id", + "name": "name", "type": "str" }, { @@ -1040,22 +2199,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_vpc_flow_logs_config" + "resultType": "google.cloud.network_management_v1.types.VpcFlowLogsConfig", + "shortName": "get_vpc_flow_logs_config" }, - "description": "Sample for CreateVpcFlowLogsConfig", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py", + "description": "Sample for GetVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_CreateVpcFlowLogsConfig_async", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_GetVpcFlowLogsConfig_async", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1065,22 +2224,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_async.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py" }, { "canonical": true, @@ -1089,30 +2248,22 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient", "shortName": "VpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.create_vpc_flow_logs_config", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.get_vpc_flow_logs_config", "method": { - "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.CreateVpcFlowLogsConfig", + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.GetVpcFlowLogsConfig", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "CreateVpcFlowLogsConfig" + "shortName": "GetVpcFlowLogsConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.CreateVpcFlowLogsConfigRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "vpc_flow_logs_config", - "type": "google.cloud.network_management_v1.types.VpcFlowLogsConfig" + "type": "google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest" }, { - "name": "vpc_flow_logs_config_id", + "name": "name", "type": "str" }, { @@ -1128,22 
+2279,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_vpc_flow_logs_config" + "resultType": "google.cloud.network_management_v1.types.VpcFlowLogsConfig", + "shortName": "get_vpc_flow_logs_config" }, - "description": "Sample for CreateVpcFlowLogsConfig", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py", + "description": "Sample for GetVpcFlowLogsConfig", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_CreateVpcFlowLogsConfig_sync", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_GetVpcFlowLogsConfig_sync", "segments": [ { - "end": 60, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1153,22 +2304,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_create_vpc_flow_logs_config_sync.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py" }, { "canonical": true, @@ -1178,22 +2329,22 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient", "shortName": "VpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.delete_vpc_flow_logs_config", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.list_vpc_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.DeleteVpcFlowLogsConfig", + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.ListVpcFlowLogsConfigs", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "DeleteVpcFlowLogsConfig" + "shortName": "ListVpcFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest" + "type": "google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1209,22 +2360,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_vpc_flow_logs_config" + "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsAsyncPager", + "shortName": "list_vpc_flow_logs_configs" }, - "description": "Sample for DeleteVpcFlowLogsConfig", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py", + "description": "Sample for ListVpcFlowLogsConfigs", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_DeleteVpcFlowLogsConfig_async", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_ListVpcFlowLogsConfigs_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - 
"end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1239,17 +2390,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_async.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py" }, { "canonical": true, @@ -1258,22 +2409,22 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient", "shortName": "VpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.delete_vpc_flow_logs_config", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.list_vpc_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.DeleteVpcFlowLogsConfig", + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.ListVpcFlowLogsConfigs", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "DeleteVpcFlowLogsConfig" + "shortName": "ListVpcFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.DeleteVpcFlowLogsConfigRequest" + "type": "google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1289,22 +2440,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_vpc_flow_logs_config" + "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsPager", + "shortName": "list_vpc_flow_logs_configs" }, - "description": "Sample for DeleteVpcFlowLogsConfig", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py", + "description": "Sample for ListVpcFlowLogsConfigs", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_DeleteVpcFlowLogsConfig_sync", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_ListVpcFlowLogsConfigs_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1319,17 +2470,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_delete_vpc_flow_logs_config_sync.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py" }, { "canonical": true, @@ -1339,23 +2490,19 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient", "shortName": "VpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.get_vpc_flow_logs_config", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.query_org_vpc_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.GetVpcFlowLogsConfig", + "fullName": 
"google.cloud.networkmanagement.v1.VpcFlowLogsService.QueryOrgVpcFlowLogsConfigs", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "GetVpcFlowLogsConfig" + "shortName": "QueryOrgVpcFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsRequest" }, { "name": "retry", @@ -1370,22 +2517,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.types.VpcFlowLogsConfig", - "shortName": "get_vpc_flow_logs_config" + "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.QueryOrgVpcFlowLogsConfigsAsyncPager", + "shortName": "query_org_vpc_flow_logs_configs" }, - "description": "Sample for GetVpcFlowLogsConfig", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py", + "description": "Sample for QueryOrgVpcFlowLogsConfigs", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_GetVpcFlowLogsConfig_async", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_QueryOrgVpcFlowLogsConfigs_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1405,12 +2552,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_async.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_async.py" }, { "canonical": true, @@ -1419,23 +2566,19 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient", "shortName": "VpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.get_vpc_flow_logs_config", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.query_org_vpc_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.GetVpcFlowLogsConfig", + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.QueryOrgVpcFlowLogsConfigs", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "GetVpcFlowLogsConfig" + "shortName": "QueryOrgVpcFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.GetVpcFlowLogsConfigRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.network_management_v1.types.QueryOrgVpcFlowLogsConfigsRequest" }, { "name": "retry", @@ -1450,22 +2593,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.types.VpcFlowLogsConfig", - "shortName": "get_vpc_flow_logs_config" + "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.QueryOrgVpcFlowLogsConfigsPager", + "shortName": "query_org_vpc_flow_logs_configs" }, - "description": "Sample for GetVpcFlowLogsConfig", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py", + 
"description": "Sample for QueryOrgVpcFlowLogsConfigs", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_GetVpcFlowLogsConfig_sync", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_QueryOrgVpcFlowLogsConfigs_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1485,12 +2628,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_get_vpc_flow_logs_config_sync.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_query_org_vpc_flow_logs_configs_sync.py" }, { "canonical": true, @@ -1500,23 +2643,19 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient", "shortName": "VpcFlowLogsServiceAsyncClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.list_vpc_flow_logs_configs", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceAsyncClient.show_effective_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.ListVpcFlowLogsConfigs", + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.ShowEffectiveFlowLogsConfigs", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "ListVpcFlowLogsConfigs" + "shortName": "ShowEffectiveFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsRequest" }, { "name": "retry", @@ -1531,22 +2670,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsAsyncPager", - "shortName": "list_vpc_flow_logs_configs" + "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ShowEffectiveFlowLogsConfigsAsyncPager", + "shortName": "show_effective_flow_logs_configs" }, - "description": "Sample for ListVpcFlowLogsConfigs", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py", + "description": "Sample for ShowEffectiveFlowLogsConfigs", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_ListVpcFlowLogsConfigs_async", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_ShowEffectiveFlowLogsConfigs_async", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -1556,22 +2695,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_async.py" + "title": 
"networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_async.py" }, { "canonical": true, @@ -1580,23 +2719,19 @@ "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient", "shortName": "VpcFlowLogsServiceClient" }, - "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.list_vpc_flow_logs_configs", + "fullName": "google.cloud.network_management_v1.VpcFlowLogsServiceClient.show_effective_flow_logs_configs", "method": { - "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.ListVpcFlowLogsConfigs", + "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService.ShowEffectiveFlowLogsConfigs", "service": { "fullName": "google.cloud.networkmanagement.v1.VpcFlowLogsService", "shortName": "VpcFlowLogsService" }, - "shortName": "ListVpcFlowLogsConfigs" + "shortName": "ShowEffectiveFlowLogsConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_management_v1.types.ListVpcFlowLogsConfigsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.network_management_v1.types.ShowEffectiveFlowLogsConfigsRequest" }, { "name": "retry", @@ -1611,22 +2746,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ListVpcFlowLogsConfigsPager", - "shortName": "list_vpc_flow_logs_configs" + "resultType": "google.cloud.network_management_v1.services.vpc_flow_logs_service.pagers.ShowEffectiveFlowLogsConfigsPager", + "shortName": "show_effective_flow_logs_configs" }, - "description": "Sample for ListVpcFlowLogsConfigs", - "file": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py", + "description": "Sample for ShowEffectiveFlowLogsConfigs", + "file": "networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_ListVpcFlowLogsConfigs_sync", + "regionTag": "networkmanagement_v1_generated_VpcFlowLogsService_ShowEffectiveFlowLogsConfigs_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -1636,22 +2771,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "networkmanagement_v1_generated_vpc_flow_logs_service_list_vpc_flow_logs_configs_sync.py" + "title": "networkmanagement_v1_generated_vpc_flow_logs_service_show_effective_flow_logs_configs_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-network-management/scripts/fixup_network_management_v1_keywords.py b/packages/google-cloud-network-management/scripts/fixup_network_management_v1_keywords.py index d0bbd52255f3..4883cedc4e17 100644 --- a/packages/google-cloud-network-management/scripts/fixup_network_management_v1_keywords.py +++ b/packages/google-cloud-network-management/scripts/fixup_network_management_v1_keywords.py @@ -47,7 +47,9 @@ class network_managementCallTransformer(cst.CSTTransformer): 'get_vpc_flow_logs_config': ('name', ), 'list_connectivity_tests': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_vpc_flow_logs_configs': ('parent', 'page_size', 'page_token', 'filter', 
'order_by', ), + 'query_org_vpc_flow_logs_configs': ('parent', 'page_size', 'page_token', 'filter', ), 'rerun_connectivity_test': ('name', ), + 'show_effective_flow_logs_configs': ('parent', 'resource', 'page_size', 'page_token', 'filter', ), 'update_connectivity_test': ('update_mask', 'resource', ), 'update_vpc_flow_logs_config': ('update_mask', 'vpc_flow_logs_config', ), } diff --git a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_organization_vpc_flow_logs_service.py b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_organization_vpc_flow_logs_service.py new file mode 100644 index 000000000000..87890c79b4f9 --- /dev/null +++ b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_organization_vpc_flow_logs_service.py @@ -0,0 +1,8406 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.network_management_v1.services.organization_vpc_flow_logs_service import ( + OrganizationVpcFlowLogsServiceAsyncClient, + OrganizationVpcFlowLogsServiceClient, + pagers, + transports, +) +from google.cloud.network_management_v1.types import reachability, 
vpc_flow_logs +from google.cloud.network_management_v1.types import ( + vpc_flow_logs_config as gcn_vpc_flow_logs_config, +) +from google.cloud.network_management_v1.types import vpc_flow_logs_config + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert OrganizationVpcFlowLogsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + OrganizationVpcFlowLogsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert OrganizationVpcFlowLogsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert OrganizationVpcFlowLogsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert OrganizationVpcFlowLogsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + 
OrganizationVpcFlowLogsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert OrganizationVpcFlowLogsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert OrganizationVpcFlowLogsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert OrganizationVpcFlowLogsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + OrganizationVpcFlowLogsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert OrganizationVpcFlowLogsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + OrganizationVpcFlowLogsServiceClient._get_client_cert_source(None, False) + is None + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + OrganizationVpcFlowLogsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + OrganizationVpcFlowLogsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceClient), +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = OrganizationVpcFlowLogsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + OrganizationVpcFlowLogsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + OrganizationVpcFlowLogsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + assert ( + OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == OrganizationVpcFlowLogsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + 
OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == OrganizationVpcFlowLogsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == OrganizationVpcFlowLogsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + OrganizationVpcFlowLogsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + OrganizationVpcFlowLogsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + OrganizationVpcFlowLogsServiceClient._get_universe_domain(None, None) + == OrganizationVpcFlowLogsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + OrganizationVpcFlowLogsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
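Editorial aside for reviewers, not part of the generated diff: a minimal, hedged sketch of how the new OrganizationVpcFlowLogsServiceClient surface exercised by these tests is driven in practice. It assumes a google-cloud-network-management build that already contains this generated service, patches the gRPC stub exactly as the tests above do so nothing goes over the wire, and uses a purely illustrative parent value (the real resource pattern may differ).

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.network_management_v1.services.organization_vpc_flow_logs_service import (
    OrganizationVpcFlowLogsServiceClient,
)
from google.cloud.network_management_v1.types import vpc_flow_logs

# Anonymous credentials: no real auth flow is attempted.
client = OrganizationVpcFlowLogsServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
    transport="grpc",
)

# Patch the transport callable, mirroring the generated tests, so the RPC
# never leaves the process.
with mock.patch.object(
    type(client.transport.list_vpc_flow_logs_configs), "__call__"
) as call:
    call.return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse()
    pager = client.list_vpc_flow_logs_configs(
        # Illustrative parent; check the service docs for the actual pattern.
        parent="organizations/123/locations/global",
    )
    # The mocked response carries no configs and no next_page_token,
    # so the pager yields nothing and stops after one page.
    assert list(pager) == []

The same patching pattern works for the other new methods the diff wires up (for example show_effective_flow_logs_configs on VpcFlowLogsServiceClient); only the request type and the patched transport attribute change.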
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = OrganizationVpcFlowLogsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = OrganizationVpcFlowLogsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OrganizationVpcFlowLogsServiceClient, "grpc"), + (OrganizationVpcFlowLogsServiceAsyncClient, "grpc_asyncio"), + (OrganizationVpcFlowLogsServiceClient, "rest"), + ], +) +def test_organization_vpc_flow_logs_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networkmanagement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networkmanagement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.OrganizationVpcFlowLogsServiceGrpcTransport, "grpc"), + (transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.OrganizationVpcFlowLogsServiceRestTransport, "rest"), + ], +) +def test_organization_vpc_flow_logs_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (OrganizationVpcFlowLogsServiceClient, "grpc"), + (OrganizationVpcFlowLogsServiceAsyncClient, "grpc_asyncio"), + (OrganizationVpcFlowLogsServiceClient, "rest"), + ], +) +def 
test_organization_vpc_flow_logs_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "networkmanagement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networkmanagement.googleapis.com" + ) + + +def test_organization_vpc_flow_logs_service_client_get_transport_class(): + transport = OrganizationVpcFlowLogsServiceClient.get_transport_class() + available_transports = [ + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + transports.OrganizationVpcFlowLogsServiceRestTransport, + ] + assert transport in available_transports + + transport = OrganizationVpcFlowLogsServiceClient.get_transport_class("grpc") + assert transport == transports.OrganizationVpcFlowLogsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + "grpc", + ), + ( + OrganizationVpcFlowLogsServiceAsyncClient, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceClient), +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceAsyncClient), +) +def test_organization_vpc_flow_logs_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + OrganizationVpcFlowLogsServiceClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + OrganizationVpcFlowLogsServiceClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + OrganizationVpcFlowLogsServiceClient, + 
transports.OrganizationVpcFlowLogsServiceGrpcTransport, + "grpc", + "true", + ), + ( + OrganizationVpcFlowLogsServiceAsyncClient, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + "grpc", + "false", + ), + ( + OrganizationVpcFlowLogsServiceAsyncClient, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceRestTransport, + "rest", + "true", + ), + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceClient), +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_organization_vpc_flow_logs_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", + [OrganizationVpcFlowLogsServiceClient, OrganizationVpcFlowLogsServiceAsyncClient], +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OrganizationVpcFlowLogsServiceClient), +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(OrganizationVpcFlowLogsServiceAsyncClient), +) +def test_organization_vpc_flow_logs_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", + [OrganizationVpcFlowLogsServiceClient, OrganizationVpcFlowLogsServiceAsyncClient], +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceClient), +) +@mock.patch.object( + OrganizationVpcFlowLogsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(OrganizationVpcFlowLogsServiceAsyncClient), +) +def test_organization_vpc_flow_logs_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = OrganizationVpcFlowLogsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ( + OrganizationVpcFlowLogsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = ( + OrganizationVpcFlowLogsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + "grpc", + ), + ( + OrganizationVpcFlowLogsServiceAsyncClient, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceRestTransport, + "rest", + ), + ], +) +def test_organization_vpc_flow_logs_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OrganizationVpcFlowLogsServiceAsyncClient, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceRestTransport, + "rest", + None, + ), + ], +) +def test_organization_vpc_flow_logs_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_organization_vpc_flow_logs_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.transports.OrganizationVpcFlowLogsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = OrganizationVpcFlowLogsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + OrganizationVpcFlowLogsServiceAsyncClient, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_organization_vpc_flow_logs_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "networkmanagement.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="networkmanagement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.ListVpcFlowLogsConfigsRequest, + dict, + ], +) +def test_list_vpc_flow_logs_configs(request_type, transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVpcFlowLogsConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_vpc_flow_logs_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_vpc_flow_logs_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpc_flow_logs.ListVpcFlowLogsConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_vpc_flow_logs_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_vpc_flow_logs_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_vpc_flow_logs_configs + ] = mock_rpc + request = {} + client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_vpc_flow_logs_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_vpc_flow_logs_configs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_vpc_flow_logs_configs + ] = mock_rpc + + request = {} + await client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_vpc_flow_logs_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_async( + transport: str = "grpc_asyncio", + request_type=vpc_flow_logs.ListVpcFlowLogsConfigsRequest, +): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListVpcFlowLogsConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_async_from_dict(): + await test_list_vpc_flow_logs_configs_async(request_type=dict) + + +def test_list_vpc_flow_logs_configs_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + ) + await client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_vpc_flow_logs_configs_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_vpc_flow_logs_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_vpc_flow_logs_configs_flattened_error(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_vpc_flow_logs_configs( + vpc_flow_logs.ListVpcFlowLogsConfigsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_flattened_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_vpc_flow_logs_configs( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_flattened_error_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_vpc_flow_logs_configs( + vpc_flow_logs.ListVpcFlowLogsConfigsRequest(), + parent="parent_value", + ) + + +def test_list_vpc_flow_logs_configs_pager(transport_name: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_vpc_flow_logs_configs( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in results + ) + + +def test_list_vpc_flow_logs_configs_pages(transport_name: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.list_vpc_flow_logs_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_async_pager(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_vpc_flow_logs_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_async_pages(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_vpc_flow_logs_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.GetVpcFlowLogsConfigRequest, + dict, + ], +) +def test_get_vpc_flow_logs_config(request_type, transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value", + description="description_value", + state=vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED, + aggregation_interval=vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC, + flow_sampling=0.1394, + metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, + metadata_fields=["metadata_fields_value"], + filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, + target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, + network="network_value", + ) + response = client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, vpc_flow_logs_config.VpcFlowLogsConfig) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED + assert ( + response.aggregation_interval + == vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC + ) + assert math.isclose(response.flow_sampling, 0.1394, rel_tol=1e-6) + assert ( + response.metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA + ) + assert response.metadata_fields == ["metadata_fields_value"] + assert response.filter_expr == "filter_expr_value" + assert ( + response.cross_project_metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED + ) + assert ( + response.target_resource_state + == vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS + ) + + +def test_get_vpc_flow_logs_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_vpc_flow_logs_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpc_flow_logs.GetVpcFlowLogsConfigRequest( + name="name_value", + ) + + +def test_get_vpc_flow_logs_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_vpc_flow_logs_config + ] = mock_rpc + request = {} + client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_vpc_flow_logs_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_vpc_flow_logs_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_vpc_flow_logs_config + ] = mock_rpc + + request = {} + await client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_vpc_flow_logs_config_async( + transport: str = "grpc_asyncio", + request_type=vpc_flow_logs.GetVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value", + description="description_value", + state=vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED, + aggregation_interval=vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC, + flow_sampling=0.1394, + metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, + metadata_fields=["metadata_fields_value"], + filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, + target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, + ) + ) + response = await client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, vpc_flow_logs_config.VpcFlowLogsConfig) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED + assert ( + response.aggregation_interval + == vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC + ) + assert math.isclose(response.flow_sampling, 0.1394, rel_tol=1e-6) + assert ( + response.metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA + ) + assert response.metadata_fields == ["metadata_fields_value"] + assert response.filter_expr == "filter_expr_value" + assert ( + response.cross_project_metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED + ) + assert ( + response.target_resource_state + == vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS + ) + + +@pytest.mark.asyncio +async def test_get_vpc_flow_logs_config_async_from_dict(): + await test_get_vpc_flow_logs_config_async(request_type=dict) + + +def test_get_vpc_flow_logs_config_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_vpc_flow_logs_config_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs_config.VpcFlowLogsConfig() + ) + await client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_vpc_flow_logs_config_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_vpc_flow_logs_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_vpc_flow_logs_config_flattened_error(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_vpc_flow_logs_config( + vpc_flow_logs.GetVpcFlowLogsConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_vpc_flow_logs_config_flattened_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs_config.VpcFlowLogsConfig() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_vpc_flow_logs_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_vpc_flow_logs_config_flattened_error_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_vpc_flow_logs_config( + vpc_flow_logs.GetVpcFlowLogsConfigRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.CreateVpcFlowLogsConfigRequest, + dict, + ], +) +def test_create_vpc_flow_logs_config(request_type, transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_vpc_flow_logs_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest( + parent="parent_value", + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_vpc_flow_logs_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpc_flow_logs.CreateVpcFlowLogsConfigRequest( + parent="parent_value", + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + + +def test_create_vpc_flow_logs_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_vpc_flow_logs_config + ] = mock_rpc + request = {} + client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_vpc_flow_logs_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_vpc_flow_logs_config + ] = mock_rpc + + request = {} + await client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_async( + transport: str = "grpc_asyncio", + request_type=vpc_flow_logs.CreateVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_async_from_dict(): + await test_create_vpc_flow_logs_config_async(request_type=dict) + + +def test_create_vpc_flow_logs_config_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_vpc_flow_logs_config_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_vpc_flow_logs_config( + parent="parent_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].vpc_flow_logs_config + mock_val = gcn_vpc_flow_logs_config.VpcFlowLogsConfig(name="name_value") + assert arg == mock_val + arg = args[0].vpc_flow_logs_config_id + mock_val = "vpc_flow_logs_config_id_value" + assert arg == mock_val + + +def test_create_vpc_flow_logs_config_flattened_error(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_vpc_flow_logs_config( + vpc_flow_logs.CreateVpcFlowLogsConfigRequest(), + parent="parent_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_flattened_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_vpc_flow_logs_config( + parent="parent_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].vpc_flow_logs_config + mock_val = gcn_vpc_flow_logs_config.VpcFlowLogsConfig(name="name_value") + assert arg == mock_val + arg = args[0].vpc_flow_logs_config_id + mock_val = "vpc_flow_logs_config_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_flattened_error_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_vpc_flow_logs_config( + vpc_flow_logs.CreateVpcFlowLogsConfigRequest(), + parent="parent_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, + dict, + ], +) +def test_update_vpc_flow_logs_config(request_type, transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_vpc_flow_logs_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_vpc_flow_logs_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + + +def test_update_vpc_flow_logs_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_vpc_flow_logs_config + ] = mock_rpc + request = {} + client.update_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_vpc_flow_logs_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_vpc_flow_logs_config + ] = mock_rpc + + request = {} + await client.update_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_async( + transport: str = "grpc_asyncio", + request_type=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_async_from_dict(): + await test_update_vpc_flow_logs_config_async(request_type=dict) + + +def test_update_vpc_flow_logs_config_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + + request.vpc_flow_logs_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vpc_flow_logs_config.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + + request.vpc_flow_logs_config.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "vpc_flow_logs_config.name=name_value", + ) in kw["metadata"] + + +def test_update_vpc_flow_logs_config_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_vpc_flow_logs_config( + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].vpc_flow_logs_config + mock_val = gcn_vpc_flow_logs_config.VpcFlowLogsConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_vpc_flow_logs_config_flattened_error(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_vpc_flow_logs_config( + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest(), + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_flattened_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_vpc_flow_logs_config( + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].vpc_flow_logs_config + mock_val = gcn_vpc_flow_logs_config.VpcFlowLogsConfig(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_flattened_error_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_vpc_flow_logs_config( + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest(), + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, + dict, + ], +) +def test_delete_vpc_flow_logs_config(request_type, transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_delete_vpc_flow_logs_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_vpc_flow_logs_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpc_flow_logs.DeleteVpcFlowLogsConfigRequest( + name="name_value", + ) + + +def test_delete_vpc_flow_logs_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_vpc_flow_logs_config + ] = mock_rpc + request = {} + client.delete_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_vpc_flow_logs_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_vpc_flow_logs_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_vpc_flow_logs_config + ] = mock_rpc + + request = {} + await client.delete_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_vpc_flow_logs_config_async( + transport: str = "grpc_asyncio", + request_type=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_vpc_flow_logs_config_async_from_dict(): + await test_delete_vpc_flow_logs_config_async(request_type=dict) + + +def test_delete_vpc_flow_logs_config_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_vpc_flow_logs_config_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_vpc_flow_logs_config_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_vpc_flow_logs_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_vpc_flow_logs_config_flattened_error(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_vpc_flow_logs_config( + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_vpc_flow_logs_config_flattened_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_vpc_flow_logs_config( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_vpc_flow_logs_config_flattened_error_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_vpc_flow_logs_config( + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest(), + name="name_value", + ) + + +def test_list_vpc_flow_logs_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_vpc_flow_logs_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_vpc_flow_logs_configs + ] = mock_rpc + + request = {} + client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_vpc_flow_logs_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_vpc_flow_logs_configs_rest_required_fields( + request_type=vpc_flow_logs.ListVpcFlowLogsConfigsRequest, +): + transport_class = transports.OrganizationVpcFlowLogsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_vpc_flow_logs_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_vpc_flow_logs_configs_rest_unset_required_fields(): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_vpc_flow_logs_configs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_vpc_flow_logs_configs_rest_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_vpc_flow_logs_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/vpcFlowLogsConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_vpc_flow_logs_configs_rest_flattened_error(transport: str = "rest"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_vpc_flow_logs_configs( + vpc_flow_logs.ListVpcFlowLogsConfigsRequest(), + parent="parent_value", + ) + + +def test_list_vpc_flow_logs_configs_rest_pager(transport: str = "rest"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + pager = client.list_vpc_flow_logs_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in results + ) + + pages = list(client.list_vpc_flow_logs_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_vpc_flow_logs_config + ] = mock_rpc + + request = {} + client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.GetVpcFlowLogsConfigRequest, +): + transport_class = transports.OrganizationVpcFlowLogsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_vpc_flow_logs_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_vpc_flow_logs_config_rest_unset_required_fields(): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_vpc_flow_logs_config_rest_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_vpc_flow_logs_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/vpcFlowLogsConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_vpc_flow_logs_config( + vpc_flow_logs.GetVpcFlowLogsConfigRequest(), + name="name_value", + ) + + +def test_create_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_vpc_flow_logs_config + ] = mock_rpc + + request = {} + client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.CreateVpcFlowLogsConfigRequest, +): + transport_class = transports.OrganizationVpcFlowLogsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["vpc_flow_logs_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "vpcFlowLogsConfigId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "vpcFlowLogsConfigId" in jsonified_request + assert ( + jsonified_request["vpcFlowLogsConfigId"] + == request_init["vpc_flow_logs_config_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["vpcFlowLogsConfigId"] = "vpc_flow_logs_config_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("vpc_flow_logs_config_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "vpcFlowLogsConfigId" in jsonified_request + assert jsonified_request["vpcFlowLogsConfigId"] == "vpc_flow_logs_config_id_value" + + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_vpc_flow_logs_config(request) + + expected_params = [ + ( + "vpcFlowLogsConfigId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_vpc_flow_logs_config_rest_unset_required_fields(): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("vpcFlowLogsConfigId",)) + & set( + ( + "parent", + "vpcFlowLogsConfigId", + "vpcFlowLogsConfig", + ) + ) + ) + + +def test_create_vpc_flow_logs_config_rest_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "organizations/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_vpc_flow_logs_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=organizations/*/locations/*}/vpcFlowLogsConfigs" + % client.transport._host, + args[1], + ) + + +def test_create_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_vpc_flow_logs_config( + vpc_flow_logs.CreateVpcFlowLogsConfigRequest(), + parent="parent_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + + +def test_update_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_vpc_flow_logs_config + ] = mock_rpc + + request = {} + client.update_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, +): + transport_class = transports.OrganizationVpcFlowLogsServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_vpc_flow_logs_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_vpc_flow_logs_config_rest_unset_required_fields(): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "vpcFlowLogsConfig", + ) + ) + ) + + +def test_update_vpc_flow_logs_config_rest_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "vpc_flow_logs_config": { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_vpc_flow_logs_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{vpc_flow_logs_config.name=organizations/*/locations/*/vpcFlowLogsConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_vpc_flow_logs_config( + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest(), + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_vpc_flow_logs_config + ] = mock_rpc + + request = {} + client.delete_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, +): + transport_class = transports.OrganizationVpcFlowLogsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_vpc_flow_logs_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_vpc_flow_logs_config_rest_unset_required_fields(): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_vpc_flow_logs_config_rest_flattened(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_vpc_flow_logs_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=organizations/*/locations/*/vpcFlowLogsConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_vpc_flow_logs_config( + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.OrganizationVpcFlowLogsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.OrganizationVpcFlowLogsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OrganizationVpcFlowLogsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.OrganizationVpcFlowLogsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OrganizationVpcFlowLogsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = OrganizationVpcFlowLogsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.OrganizationVpcFlowLogsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = OrganizationVpcFlowLogsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.OrganizationVpcFlowLogsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = OrganizationVpcFlowLogsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.OrganizationVpcFlowLogsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + transports.OrganizationVpcFlowLogsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = OrganizationVpcFlowLogsServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_vpc_flow_logs_configs_empty_call_grpc(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + client.list_vpc_flow_logs_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_vpc_flow_logs_config_empty_call_grpc(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + client.get_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_vpc_flow_logs_config_empty_call_grpc(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_vpc_flow_logs_config_empty_call_grpc(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_vpc_flow_logs_config_empty_call_grpc(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = OrganizationVpcFlowLogsServiceAsyncClient.get_transport_class( + "grpc_asyncio" + )(credentials=async_anonymous_credentials()) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_vpc_flow_logs_configs_empty_call_grpc_asyncio(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_vpc_flow_logs_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_vpc_flow_logs_config_empty_call_grpc_asyncio(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value", + description="description_value", + state=vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED, + aggregation_interval=vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC, + flow_sampling=0.1394, + metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, + metadata_fields=["metadata_fields_value"], + filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, + target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, + ) + ) + await client.get_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_empty_call_grpc_asyncio(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_empty_call_grpc_asyncio(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_vpc_flow_logs_config_empty_call_grpc_asyncio(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = OrganizationVpcFlowLogsServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_vpc_flow_logs_configs_rest_bad_request( + request_type=vpc_flow_logs.ListVpcFlowLogsConfigsRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_vpc_flow_logs_configs(request) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.ListVpcFlowLogsConfigsRequest, + dict, + ], +) +def test_list_vpc_flow_logs_configs_rest_call_success(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_vpc_flow_logs_configs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListVpcFlowLogsConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_vpc_flow_logs_configs_rest_interceptors(null_interceptor): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationVpcFlowLogsServiceRestInterceptor(), + ) + client = OrganizationVpcFlowLogsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_list_vpc_flow_logs_configs", + ) as post, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_list_vpc_flow_logs_configs_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "pre_list_vpc_flow_logs_configs", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = vpc_flow_logs.ListVpcFlowLogsConfigsRequest.pb( + vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.to_json( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + ) + req.return_value.content = return_value + + request = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + post_with_metadata.return_value = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse(), + metadata, + ) + + client.list_vpc_flow_logs_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_vpc_flow_logs_config_rest_bad_request( + request_type=vpc_flow_logs.GetVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_vpc_flow_logs_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.GetVpcFlowLogsConfigRequest, + dict, + ], +) +def test_get_vpc_flow_logs_config_rest_call_success(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value", + description="description_value", + state=vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED, + aggregation_interval=vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC, + flow_sampling=0.1394, + metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, + metadata_fields=["metadata_fields_value"], + filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, + target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, + network="network_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_vpc_flow_logs_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, vpc_flow_logs_config.VpcFlowLogsConfig) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED + assert ( + response.aggregation_interval + == vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC + ) + assert math.isclose(response.flow_sampling, 0.1394, rel_tol=1e-6) + assert ( + response.metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA + ) + assert response.metadata_fields == ["metadata_fields_value"] + assert response.filter_expr == "filter_expr_value" + assert ( + response.cross_project_metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED + ) + assert ( + response.target_resource_state + == vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_vpc_flow_logs_config_rest_interceptors(null_interceptor): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationVpcFlowLogsServiceRestInterceptor(), + ) + client = OrganizationVpcFlowLogsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_get_vpc_flow_logs_config", + ) as post, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_get_vpc_flow_logs_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "pre_get_vpc_flow_logs_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = vpc_flow_logs.GetVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.GetVpcFlowLogsConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.to_json( + vpc_flow_logs_config.VpcFlowLogsConfig() + ) + req.return_value.content = return_value + + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + post_with_metadata.return_value = ( + vpc_flow_logs_config.VpcFlowLogsConfig(), + metadata, + ) + + client.get_vpc_flow_logs_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_vpc_flow_logs_config_rest_bad_request( + request_type=vpc_flow_logs.CreateVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_vpc_flow_logs_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.CreateVpcFlowLogsConfigRequest, + dict, + ], +) +def test_create_vpc_flow_logs_config_rest_call_success(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "organizations/sample1/locations/sample2"} + request_init["vpc_flow_logs_config"] = { + "name": "name_value", + "description": "description_value", + "state": 1, + "aggregation_interval": 1, + "flow_sampling": 0.1394, + "metadata": 1, + "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], + "filter_expr": "filter_expr_value", + "cross_project_metadata": 1, + "target_resource_state": 1, + "network": "network_value", + "subnet": "subnet_value", + "interconnect_attachment": "interconnect_attachment_value", + "vpn_tunnel": "vpn_tunnel_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vpc_flow_logs.CreateVpcFlowLogsConfigRequest.meta.fields[ + "vpc_flow_logs_config" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "vpc_flow_logs_config" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpc_flow_logs_config"][field])): + del request_init["vpc_flow_logs_config"][field][i][subfield] + else: + del request_init["vpc_flow_logs_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_vpc_flow_logs_config(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_vpc_flow_logs_config_rest_interceptors(null_interceptor): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationVpcFlowLogsServiceRestInterceptor(), + ) + client = OrganizationVpcFlowLogsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_create_vpc_flow_logs_config", + ) as post, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_create_vpc_flow_logs_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "pre_create_vpc_flow_logs_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = vpc_flow_logs.CreateVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_vpc_flow_logs_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_vpc_flow_logs_config_rest_bad_request( + request_type=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "vpc_flow_logs_config": { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_vpc_flow_logs_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, + dict, + ], +) +def test_update_vpc_flow_logs_config_rest_call_success(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "vpc_flow_logs_config": { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + } + request_init["vpc_flow_logs_config"] = { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3", + "description": "description_value", + "state": 1, + "aggregation_interval": 1, + "flow_sampling": 0.1394, + "metadata": 1, + "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], + "filter_expr": "filter_expr_value", + "cross_project_metadata": 1, + "target_resource_state": 1, + "network": "network_value", + "subnet": "subnet_value", + "interconnect_attachment": "interconnect_attachment_value", + "vpn_tunnel": "vpn_tunnel_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.meta.fields[ + "vpc_flow_logs_config" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "vpc_flow_logs_config" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpc_flow_logs_config"][field])): + del request_init["vpc_flow_logs_config"][field][i][subfield] + else: + del request_init["vpc_flow_logs_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_vpc_flow_logs_config(request) + + # Establish that the response is the type that we expect. 
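The get_message_fields helper above hinges on one check: proto-plus message classes expose their fields through .meta.fields, while raw protobuf *_pb2 classes carry a DESCRIPTOR. A short standalone sketch of that distinction (the helper name is illustrative):

def list_field_names(message_cls):
    # Raw protobuf (*_pb2) classes have a DESCRIPTOR attribute; proto-plus wrappers
    # do not, which is exactly the check used by get_message_fields above.
    if hasattr(message_cls, "DESCRIPTOR"):
        return [f.name for f in message_cls.DESCRIPTOR.fields]
    return list(message_cls.meta.fields.keys())

For example, a proto-plus request class such as vpc_flow_logs.UpdateVpcFlowLogsConfigRequest takes the .meta.fields branch, while a raw class such as operations_pb2.Operation takes the DESCRIPTOR branch.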
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_vpc_flow_logs_config_rest_interceptors(null_interceptor): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationVpcFlowLogsServiceRestInterceptor(), + ) + client = OrganizationVpcFlowLogsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_update_vpc_flow_logs_config", + ) as post, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_update_vpc_flow_logs_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "pre_update_vpc_flow_logs_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_vpc_flow_logs_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_vpc_flow_logs_config_rest_bad_request( + request_type=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
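The interceptor tests above patch the pre/post hooks of OrganizationVpcFlowLogsServiceRestInterceptor. As a hedged sketch, with the hook signatures inferred from the mocks above (the pre hook returns a (request, metadata) pair and the post hook returns the response) and reusing names already imported by this test module, a custom interceptor could be wired up like this:

class LoggingInterceptor(transports.OrganizationVpcFlowLogsServiceRestInterceptor):
    def pre_update_vpc_flow_logs_config(self, request, metadata):
        # Inspect or rewrite the outgoing request and metadata here.
        return request, metadata

    def post_update_vpc_flow_logs_config(self, response):
        # Inspect the raw Operation before the client processes it.
        return response

rest_transport = transports.OrganizationVpcFlowLogsServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    interceptor=LoggingInterceptor(),
)
intercepted_client = OrganizationVpcFlowLogsServiceClient(transport=rest_transport)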
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_vpc_flow_logs_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, + dict, + ], +) +def test_delete_vpc_flow_logs_config_rest_call_success(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_vpc_flow_logs_config(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_vpc_flow_logs_config_rest_interceptors(null_interceptor): + transport = transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.OrganizationVpcFlowLogsServiceRestInterceptor(), + ) + client = OrganizationVpcFlowLogsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_delete_vpc_flow_logs_config", + ) as post, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "post_delete_vpc_flow_logs_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.OrganizationVpcFlowLogsServiceRestInterceptor, + "pre_delete_vpc_flow_logs_config", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + metadata 
= [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_vpc_flow_logs_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/global/connectivityTests/sample2"}, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/global/connectivityTests/sample2" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/global/connectivityTests/sample2"}, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/global/connectivityTests/sample2" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/locations/global/connectivityTests/sample2"}, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "resource": "projects/sample1/locations/global/connectivityTests/sample2" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/global/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/global/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/global/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/global/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/global/operations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/global/operations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/global"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/global"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_vpc_flow_logs_configs_empty_call_rest(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_vpc_flow_logs_configs), "__call__" + ) as call: + client.list_vpc_flow_logs_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.ListVpcFlowLogsConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_vpc_flow_logs_config_empty_call_rest(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_vpc_flow_logs_config), "__call__" + ) as call: + client.get_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_vpc_flow_logs_config_empty_call_rest(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + client.create_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_vpc_flow_logs_config_empty_call_rest(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + client.update_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_vpc_flow_logs_config_empty_call_rest(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_vpc_flow_logs_config), "__call__" + ) as call: + client.delete_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +def test_organization_vpc_flow_logs_service_rest_lro_client(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + ) + + +def test_organization_vpc_flow_logs_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.OrganizationVpcFlowLogsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_organization_vpc_flow_logs_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.transports.OrganizationVpcFlowLogsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OrganizationVpcFlowLogsServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
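The REST LRO client test above checks two things: the type of transport.operations_client and that repeated accesses return the same object. A simplified stand-in for that lazy caching pattern (illustrative only, not the real transport code):

class _CachedLroClientDemo:
    def __init__(self):
        self._operations_client = None

    @property
    def operations_client(self):
        # Build the long-running-operations client on first access and reuse it.
        if self._operations_client is None:
            self._operations_client = object()  # stands in for the real LRO client
        return self._operations_client

demo = _CachedLroClientDemo()
assert demo.operations_client is demo.operations_client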
+ methods = ( + "list_vpc_flow_logs_configs", + "get_vpc_flow_logs_config", + "create_vpc_flow_logs_config", + "update_vpc_flow_logs_config", + "delete_vpc_flow_logs_config", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_organization_vpc_flow_logs_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.transports.OrganizationVpcFlowLogsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OrganizationVpcFlowLogsServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_organization_vpc_flow_logs_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.network_management_v1.services.organization_vpc_flow_logs_service.transports.OrganizationVpcFlowLogsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.OrganizationVpcFlowLogsServiceTransport() + adc.assert_called_once() + + +def test_organization_vpc_flow_logs_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + OrganizationVpcFlowLogsServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + ], +) +def test_organization_vpc_flow_logs_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + transports.OrganizationVpcFlowLogsServiceRestTransport, + ], +) +def test_organization_vpc_flow_logs_service_transport_auth_gdch_credentials( + transport_class, +): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.OrganizationVpcFlowLogsServiceGrpcTransport, grpc_helpers), + ( + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + grpc_helpers_async, + ), + ], +) +def test_organization_vpc_flow_logs_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "networkmanagement.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="networkmanagement.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + ], +) +def test_organization_vpc_flow_logs_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_organization_vpc_flow_logs_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.OrganizationVpcFlowLogsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_organization_vpc_flow_logs_service_host_no_port(transport_name): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networkmanagement.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networkmanagement.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networkmanagement.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_organization_vpc_flow_logs_service_host_with_port(transport_name): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="networkmanagement.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "networkmanagement.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://networkmanagement.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_organization_vpc_flow_logs_service_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = OrganizationVpcFlowLogsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = OrganizationVpcFlowLogsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_vpc_flow_logs_configs._session + session2 = client2.transport.list_vpc_flow_logs_configs._session + assert session1 != session2 + session1 = client1.transport.get_vpc_flow_logs_config._session + session2 = client2.transport.get_vpc_flow_logs_config._session + assert session1 != session2 + session1 = client1.transport.create_vpc_flow_logs_config._session + session2 = client2.transport.create_vpc_flow_logs_config._session + assert session1 != session2 + session1 = client1.transport.update_vpc_flow_logs_config._session + session2 = client2.transport.update_vpc_flow_logs_config._session + assert session1 != session2 + session1 = client1.transport.delete_vpc_flow_logs_config._session + session2 = client2.transport.delete_vpc_flow_logs_config._session + assert session1 != session2 + + +def test_organization_vpc_flow_logs_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", 
grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.OrganizationVpcFlowLogsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_organization_vpc_flow_logs_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + ], +) +def test_organization_vpc_flow_logs_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
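The mTLS tests above pass client_cert_source_callback, and the certificate_chain=b"cert bytes", private_key=b"key bytes" assertion shows the callback is expected to return a (certificate_chain, private_key) byte pair. A hedged sketch of supplying such a callback to the REST transport, reusing names from this test module (the byte values are placeholders mirroring the fixture, not usable certificates):

def my_client_cert_source():
    # In real use these would be read from files or a secret store.
    return b"cert bytes", b"key bytes"

mtls_transport = transports.OrganizationVpcFlowLogsServiceRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
    client_cert_source_for_mtls=my_client_cert_source,
)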
+@pytest.mark.parametrize( + "transport_class", + [ + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + ], +) +def test_organization_vpc_flow_logs_service_transport_channel_mtls_with_adc( + transport_class, +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_organization_vpc_flow_logs_service_grpc_lro_client(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_organization_vpc_flow_logs_service_grpc_lro_async_client(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_vpc_flow_logs_config_path(): + project = "squid" + location = "clam" + vpc_flow_logs_config = "whelk" + expected = "projects/{project}/locations/{location}/vpcFlowLogsConfigs/{vpc_flow_logs_config}".format( + project=project, + location=location, + vpc_flow_logs_config=vpc_flow_logs_config, + ) + actual = OrganizationVpcFlowLogsServiceClient.vpc_flow_logs_config_path( + project, location, vpc_flow_logs_config + ) + assert expected == actual + + +def test_parse_vpc_flow_logs_config_path(): + expected = { + "project": "octopus", + "location": "oyster", + "vpc_flow_logs_config": "nudibranch", + } + path = OrganizationVpcFlowLogsServiceClient.vpc_flow_logs_config_path(**expected) + + # Check that the path construction is reversible. 
+ actual = OrganizationVpcFlowLogsServiceClient.parse_vpc_flow_logs_config_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = OrganizationVpcFlowLogsServiceClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = OrganizationVpcFlowLogsServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = OrganizationVpcFlowLogsServiceClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = OrganizationVpcFlowLogsServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = OrganizationVpcFlowLogsServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = OrganizationVpcFlowLogsServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = OrganizationVpcFlowLogsServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = OrganizationVpcFlowLogsServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = OrganizationVpcFlowLogsServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format( + project=project, + ) + actual = OrganizationVpcFlowLogsServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = OrganizationVpcFlowLogsServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = OrganizationVpcFlowLogsServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = OrganizationVpcFlowLogsServiceClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = OrganizationVpcFlowLogsServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
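The path helpers exercised above build a resource name from a template and parse it back. A small illustration of that round trip using the vpcFlowLogsConfigs template shown above; the regex-based parse here is illustrative, not the generated implementation:

import re

TEMPLATE = "projects/{project}/locations/{location}/vpcFlowLogsConfigs/{vpc_flow_logs_config}"
PATTERN = re.compile(
    r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
    r"/vpcFlowLogsConfigs/(?P<vpc_flow_logs_config>.+?)$"
)

def build_path(project, location, vpc_flow_logs_config):
    return TEMPLATE.format(
        project=project, location=location, vpc_flow_logs_config=vpc_flow_logs_config
    )

def parse_path(path):
    m = PATTERN.match(path)
    return m.groupdict() if m else {}

path = build_path("squid", "clam", "whelk")
assert parse_path(path) == {
    "project": "squid",
    "location": "clam",
    "vpc_flow_logs_config": "whelk",
}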
+ actual = OrganizationVpcFlowLogsServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.OrganizationVpcFlowLogsServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.OrganizationVpcFlowLogsServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = OrganizationVpcFlowLogsServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_set_iam_policy(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = OrganizationVpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = OrganizationVpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + OrganizationVpcFlowLogsServiceClient, + transports.OrganizationVpcFlowLogsServiceGrpcTransport, + ), + ( + OrganizationVpcFlowLogsServiceAsyncClient, + transports.OrganizationVpcFlowLogsServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py index 4335c9c41915..a5bb09959240 100644 --- a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py +++ b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_reachability_service.py @@ -5446,6 +5446,8 @@ def test_create_connectivity_test_rest_call_success(request_type): ], "service_account": "service_account_value", "psc_network_attachment_uri": 
"psc_network_attachment_uri_value", + "running": True, + "status": 1, }, "firewall": { "display_name": "display_name_value", @@ -5465,6 +5467,8 @@ def test_create_connectivity_test_rest_call_success(request_type): "policy": "policy_value", "policy_uri": "policy_uri_value", "firewall_rule_type": 1, + "policy_priority": 1649, + "target_type": 1, }, "route": { "route_type": 1, @@ -5519,6 +5523,11 @@ def test_create_connectivity_test_rest_call_success(request_type): "psc_service_attachment_uri": "psc_service_attachment_uri_value", "psc_google_api_target": "psc_google_api_target_value", }, + "hybrid_subnet": { + "display_name": "display_name_value", + "uri": "uri_value", + "region": "region_value", + }, "vpn_gateway": { "display_name": "display_name_value", "uri": "uri_value", @@ -5538,6 +5547,15 @@ def test_create_connectivity_test_rest_call_success(request_type): "region": "region_value", "routing_type": 1, }, + "interconnect_attachment": { + "display_name": "display_name_value", + "uri": "uri_value", + "interconnect_uri": "interconnect_uri_value", + "region": "region_value", + "cloud_router_uri": "cloud_router_uri_value", + "type_": 1, + "l2_attachment_matched_ip_address": "l2_attachment_matched_ip_address_value", + }, "vpc_connector": { "display_name": "display_name_value", "uri": "uri_value", @@ -5559,6 +5577,7 @@ def test_create_connectivity_test_rest_call_success(request_type): "ip_address": "ip_address_value", "storage_bucket": "storage_bucket_value", "psc_google_api_target": "psc_google_api_target_value", + "google_service_type": 1, }, "forward": { "target": 1, @@ -5580,6 +5599,8 @@ def test_create_connectivity_test_rest_call_success(request_type): "source_ip": "source_ip_value", "destination_ip": "destination_ip_value", "region": "region_value", + "source_geolocation_code": "source_geolocation_code_value", + "destination_geolocation_code": "destination_geolocation_code_value", }, "load_balancer": { "load_balancer_type": 1, @@ -5720,6 +5741,17 @@ def test_create_connectivity_test_rest_call_success(request_type): "destination_egress_location": { "metropolitan_area": "metropolitan_area_value" }, + "edge_responses": [ + { + "result": 1, + "sent_probe_count": 1721, + "successful_probe_count": 2367, + "probing_latency": {}, + "destination_egress_location": {}, + "destination_router": "destination_router_value", + } + ], + "probed_all_devices": True, }, "round_trip": True, "return_reachability_details": {}, @@ -6001,6 +6033,8 @@ def test_update_connectivity_test_rest_call_success(request_type): ], "service_account": "service_account_value", "psc_network_attachment_uri": "psc_network_attachment_uri_value", + "running": True, + "status": 1, }, "firewall": { "display_name": "display_name_value", @@ -6020,6 +6054,8 @@ def test_update_connectivity_test_rest_call_success(request_type): "policy": "policy_value", "policy_uri": "policy_uri_value", "firewall_rule_type": 1, + "policy_priority": 1649, + "target_type": 1, }, "route": { "route_type": 1, @@ -6074,6 +6110,11 @@ def test_update_connectivity_test_rest_call_success(request_type): "psc_service_attachment_uri": "psc_service_attachment_uri_value", "psc_google_api_target": "psc_google_api_target_value", }, + "hybrid_subnet": { + "display_name": "display_name_value", + "uri": "uri_value", + "region": "region_value", + }, "vpn_gateway": { "display_name": "display_name_value", "uri": "uri_value", @@ -6093,6 +6134,15 @@ def test_update_connectivity_test_rest_call_success(request_type): "region": "region_value", "routing_type": 1, }, + 
"interconnect_attachment": { + "display_name": "display_name_value", + "uri": "uri_value", + "interconnect_uri": "interconnect_uri_value", + "region": "region_value", + "cloud_router_uri": "cloud_router_uri_value", + "type_": 1, + "l2_attachment_matched_ip_address": "l2_attachment_matched_ip_address_value", + }, "vpc_connector": { "display_name": "display_name_value", "uri": "uri_value", @@ -6114,6 +6164,7 @@ def test_update_connectivity_test_rest_call_success(request_type): "ip_address": "ip_address_value", "storage_bucket": "storage_bucket_value", "psc_google_api_target": "psc_google_api_target_value", + "google_service_type": 1, }, "forward": { "target": 1, @@ -6135,6 +6186,8 @@ def test_update_connectivity_test_rest_call_success(request_type): "source_ip": "source_ip_value", "destination_ip": "destination_ip_value", "region": "region_value", + "source_geolocation_code": "source_geolocation_code_value", + "destination_geolocation_code": "destination_geolocation_code_value", }, "load_balancer": { "load_balancer_type": 1, @@ -6275,6 +6328,17 @@ def test_update_connectivity_test_rest_call_success(request_type): "destination_egress_location": { "metropolitan_area": "metropolitan_area_value" }, + "edge_responses": [ + { + "result": 1, + "sent_probe_count": 1721, + "successful_probe_count": 2367, + "probing_latency": {}, + "destination_egress_location": {}, + "destination_router": "destination_router_value", + } + ], + "probed_all_devices": True, }, "round_trip": True, "return_reachability_details": {}, diff --git a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_vpc_flow_logs_service.py b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_vpc_flow_logs_service.py index 21c693ca8f3e..fca1567cf62f 100644 --- a/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_vpc_flow_logs_service.py +++ b/packages/google-cloud-network-management/tests/unit/gapic/network_management_v1/test_vpc_flow_logs_service.py @@ -1765,8 +1765,9 @@ def test_get_vpc_flow_logs_config(request_type, transport: str = "grpc"): metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, metadata_fields=["metadata_fields_value"], filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, - interconnect_attachment="interconnect_attachment_value", + network="network_value", ) response = client.get_vpc_flow_logs_config(request) @@ -1792,6 +1793,10 @@ def test_get_vpc_flow_logs_config(request_type, transport: str = "grpc"): ) assert response.metadata_fields == ["metadata_fields_value"] assert response.filter_expr == "filter_expr_value" + assert ( + response.cross_project_metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED + ) assert ( response.target_resource_state == vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS @@ -1939,6 +1944,7 @@ async def test_get_vpc_flow_logs_config_async( metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, metadata_fields=["metadata_fields_value"], filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, 
target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, ) ) @@ -1966,6 +1972,10 @@ async def test_get_vpc_flow_logs_config_async( ) assert response.metadata_fields == ["metadata_fields_value"] assert response.filter_expr == "filter_expr_value" + assert ( + response.cross_project_metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED + ) assert ( response.target_resource_state == vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS @@ -3213,13 +3223,87 @@ async def test_delete_vpc_flow_logs_config_flattened_error_async(): ) -def test_list_vpc_flow_logs_configs_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, + dict, + ], +) +def test_query_org_vpc_flow_logs_configs(request_type, transport: str = "grpc"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.query_org_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.QueryOrgVpcFlowLogsConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_query_org_vpc_flow_logs_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.query_org_vpc_flow_logs_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_query_org_vpc_flow_logs_configs_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -3228,7 +3312,7 @@ def test_list_vpc_flow_logs_configs_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_vpc_flow_logs_configs + client._transport.query_org_vpc_flow_logs_configs in client._transport._wrapped_methods ) @@ -3238,156 +3322,1496 @@ def test_list_vpc_flow_logs_configs_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_vpc_flow_logs_configs + client._transport.query_org_vpc_flow_logs_configs ] = mock_rpc - request = {} - client.list_vpc_flow_logs_configs(request) + client.query_org_vpc_flow_logs_configs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_vpc_flow_logs_configs(request) + client.query_org_vpc_flow_logs_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_vpc_flow_logs_configs_rest_required_fields( - request_type=vpc_flow_logs.ListVpcFlowLogsConfigsRequest, +@pytest.mark.asyncio +async def test_query_org_vpc_flow_logs_configs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.VpcFlowLogsServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.query_org_vpc_flow_logs_configs + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.query_org_vpc_flow_logs_configs + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.query_org_vpc_flow_logs_configs(request) - jsonified_request["parent"] = "parent_value" + # 
Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + await client.query_org_vpc_flow_logs_configs(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = VpcFlowLogsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_query_org_vpc_flow_logs_configs_async( + transport: str = "grpc_asyncio", + request_type=vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, +): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.query_org_vpc_flow_logs_configs(request) - # Convert return value to protobuf type - return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.QueryOrgVpcFlowLogsConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] - response = client.list_vpc_flow_logs_configs(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_query_org_vpc_flow_logs_configs_async_from_dict(): + await test_query_org_vpc_flow_logs_configs_async(request_type=dict) -def test_list_vpc_flow_logs_configs_rest_unset_required_fields(): - transport = transports.VpcFlowLogsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_query_org_vpc_flow_logs_configs_field_headers(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_vpc_flow_logs_configs._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() + request.parent = "parent_value" -def test_list_vpc_flow_logs_configs_rest_flattened(): - client = VpcFlowLogsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse() + client.query_org_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_query_org_vpc_flow_logs_configs_field_headers_async(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse() ) - mock_args.update(sample_request) + await client.query_org_vpc_flow_logs_configs(request) - # Wrap the value into a proper Response obj + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_query_org_vpc_flow_logs_configs_pager(transport_name: str = "grpc"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.query_org_vpc_flow_logs_configs( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in results + ) + + +def test_query_org_vpc_flow_logs_configs_pages(transport_name: str = "grpc"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.query_org_vpc_flow_logs_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_query_org_vpc_flow_logs_configs_async_pager(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.query_org_vpc_flow_logs_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in responses + ) + + +@pytest.mark.asyncio +async def test_query_org_vpc_flow_logs_configs_async_pages(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.query_org_vpc_flow_logs_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, + dict, + ], +) +def test_show_effective_flow_logs_configs(request_type, transport: str = "grpc"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.show_effective_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ShowEffectiveFlowLogsConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_show_effective_flow_logs_configs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest( + parent="parent_value", + resource="resource_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.show_effective_flow_logs_configs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest( + parent="parent_value", + resource="resource_value", + page_token="page_token_value", + filter="filter_value", + ) + + +def test_show_effective_flow_logs_configs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.show_effective_flow_logs_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.show_effective_flow_logs_configs + ] = mock_rpc + request = {} + client.show_effective_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.show_effective_flow_logs_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_show_effective_flow_logs_configs_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.show_effective_flow_logs_configs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.show_effective_flow_logs_configs + ] = mock_rpc + + request = {} + await client.show_effective_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.show_effective_flow_logs_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_show_effective_flow_logs_configs_async( + transport: str = "grpc_asyncio", + request_type=vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, +): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.show_effective_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ShowEffectiveFlowLogsConfigsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_show_effective_flow_logs_configs_async_from_dict(): + await test_show_effective_flow_logs_configs_async(request_type=dict) + + +def test_show_effective_flow_logs_configs_field_headers(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + call.return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse() + client.show_effective_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_show_effective_flow_logs_configs_field_headers_async(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse() + ) + await client.show_effective_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_show_effective_flow_logs_configs_pager(transport_name: str = "grpc"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.show_effective_flow_logs_configs( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.EffectiveVpcFlowLogsConfig) + for i in results + ) + + +def test_show_effective_flow_logs_configs_pages(transport_name: str = "grpc"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + pages = list(client.show_effective_flow_logs_configs(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_show_effective_flow_logs_configs_async_pager(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + async_pager = await client.show_effective_flow_logs_configs( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.EffectiveVpcFlowLogsConfig) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_show_effective_flow_logs_configs_async_pages(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.show_effective_flow_logs_configs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_vpc_flow_logs_configs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_vpc_flow_logs_configs + in client._transport._wrapped_methods + ) + + # Replace cached wrapped 
function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_vpc_flow_logs_configs + ] = mock_rpc + + request = {} + client.list_vpc_flow_logs_configs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_vpc_flow_logs_configs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_vpc_flow_logs_configs_rest_required_fields( + request_type=vpc_flow_logs.ListVpcFlowLogsConfigsRequest, +): + transport_class = transports.VpcFlowLogsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_vpc_flow_logs_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_vpc_flow_logs_configs_rest_unset_required_fields(): + transport = transports.VpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_vpc_flow_logs_configs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_vpc_flow_logs_configs_rest_flattened(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_vpc_flow_logs_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/vpcFlowLogsConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_vpc_flow_logs_configs_rest_flattened_error(transport: str = "rest"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_vpc_flow_logs_configs( + vpc_flow_logs.ListVpcFlowLogsConfigsRequest(), + parent="parent_value", + ) + + +def test_list_vpc_flow_logs_configs_rest_pager(transport: str = "rest"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ListVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + vpc_flow_logs.ListVpcFlowLogsConfigsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_vpc_flow_logs_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in results + ) + + pages = list(client.list_vpc_flow_logs_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_vpc_flow_logs_config + ] = mock_rpc + + request = {} + client.get_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.GetVpcFlowLogsConfigRequest, +): + transport_class = transports.VpcFlowLogsServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_vpc_flow_logs_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_vpc_flow_logs_config_rest_unset_required_fields(): + transport = transports.VpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_vpc_flow_logs_config_rest_flattened(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_vpc_flow_logs_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/vpcFlowLogsConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_vpc_flow_logs_config( + vpc_flow_logs.GetVpcFlowLogsConfigRequest(), + name="name_value", + ) + + +def test_create_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_vpc_flow_logs_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_vpc_flow_logs_config + ] = mock_rpc + + request = {} + client.create_vpc_flow_logs_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_vpc_flow_logs_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.CreateVpcFlowLogsConfigRequest, +): + transport_class = transports.VpcFlowLogsServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["vpc_flow_logs_config_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "vpcFlowLogsConfigId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "vpcFlowLogsConfigId" in jsonified_request + assert ( + jsonified_request["vpcFlowLogsConfigId"] + == request_init["vpc_flow_logs_config_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["vpcFlowLogsConfigId"] = "vpc_flow_logs_config_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("vpc_flow_logs_config_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "vpcFlowLogsConfigId" in jsonified_request + assert jsonified_request["vpcFlowLogsConfigId"] == "vpc_flow_logs_config_id_value" + + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_vpc_flow_logs_config(request) + + expected_params = [ + ( + "vpcFlowLogsConfigId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_vpc_flow_logs_config_rest_unset_required_fields(): + transport = transports.VpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("vpcFlowLogsConfigId",)) + & set( + ( + "parent", + "vpcFlowLogsConfigId", + "vpcFlowLogsConfig", + ) + ) + ) + + +def test_create_vpc_flow_logs_config_rest_flattened(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vpc_flow_logs.ListVpcFlowLogsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_vpc_flow_logs_configs(**mock_args) + client.create_vpc_flow_logs_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -3400,7 +4824,7 @@ def test_list_vpc_flow_logs_configs_rest_flattened(): ) -def test_list_vpc_flow_logs_configs_rest_flattened_error(transport: str = "rest"): +def test_create_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3409,78 +4833,17 @@ def test_list_vpc_flow_logs_configs_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_vpc_flow_logs_configs( - vpc_flow_logs.ListVpcFlowLogsConfigsRequest(), + client.create_vpc_flow_logs_config( + vpc_flow_logs.CreateVpcFlowLogsConfigRequest(), parent="parent_value", - ) - - -def test_list_vpc_flow_logs_configs_rest_pager(transport: str = "rest"): - client = VpcFlowLogsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - vpc_flow_logs.ListVpcFlowLogsConfigsResponse( - vpc_flow_logs_configs=[ - vpc_flow_logs_config.VpcFlowLogsConfig(), - vpc_flow_logs_config.VpcFlowLogsConfig(), - vpc_flow_logs_config.VpcFlowLogsConfig(), - ], - next_page_token="abc", - ), - vpc_flow_logs.ListVpcFlowLogsConfigsResponse( - vpc_flow_logs_configs=[], - next_page_token="def", - ), - vpc_flow_logs.ListVpcFlowLogsConfigsResponse( - vpc_flow_logs_configs=[ - vpc_flow_logs_config.VpcFlowLogsConfig(), - ], - next_page_token="ghi", - ), - vpc_flow_logs.ListVpcFlowLogsConfigsResponse( - vpc_flow_logs_configs=[ - vpc_flow_logs_config.VpcFlowLogsConfig(), - vpc_flow_logs_config.VpcFlowLogsConfig(), - ], + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" ), + vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - vpc_flow_logs.ListVpcFlowLogsConfigsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_vpc_flow_logs_configs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in results - ) - - pages = list(client.list_vpc_flow_logs_configs(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_get_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): +def test_update_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3495,7 +4858,7 @@ def test_get_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_vpc_flow_logs_config + client._transport.update_vpc_flow_logs_config in client._transport._wrapped_methods ) @@ -3505,29 +4868,32 @@ def test_get_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_vpc_flow_logs_config + client._transport.update_vpc_flow_logs_config ] = mock_rpc request = {} - client.get_vpc_flow_logs_config(request) + client.update_vpc_flow_logs_config(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_vpc_flow_logs_config(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_vpc_flow_logs_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_vpc_flow_logs_config_rest_required_fields( - request_type=vpc_flow_logs.GetVpcFlowLogsConfigRequest, +def test_update_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, ): transport_class = transports.VpcFlowLogsServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3538,21 +4904,19 @@ def test_get_vpc_flow_logs_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + ).update_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" + # verify required fields with default values are now present unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + ).update_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3561,7 +4925,7 @@ def test_get_vpc_flow_logs_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3573,39 +4937,45 @@ def test_get_vpc_flow_logs_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_vpc_flow_logs_config(request) + response = client.update_vpc_flow_logs_config(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_vpc_flow_logs_config_rest_unset_required_fields(): +def test_update_vpc_flow_logs_config_rest_unset_required_fields(): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_vpc_flow_logs_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "updateMask", + "vpcFlowLogsConfig", + ) + ) + ) -def test_get_vpc_flow_logs_config_rest_flattened(): +def test_update_vpc_flow_logs_config_rest_flattened(): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3614,43 +4984,46 @@ def test_get_vpc_flow_logs_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + "vpc_flow_logs_config": { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_vpc_flow_logs_config(**mock_args) + client.update_vpc_flow_logs_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/vpcFlowLogsConfigs/*}" + "%s/v1/{vpc_flow_logs_config.name=projects/*/locations/*/vpcFlowLogsConfigs/*}" % client.transport._host, args[1], ) -def test_get_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): +def test_update_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3659,13 +5032,16 @@ def test_get_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_vpc_flow_logs_config( - vpc_flow_logs.GetVpcFlowLogsConfigRequest(), - name="name_value", + client.update_vpc_flow_logs_config( + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest(), + vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): +def test_delete_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3680,7 +5056,7 @@ def test_create_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_vpc_flow_logs_config + client._transport.delete_vpc_flow_logs_config in client._transport._wrapped_methods ) @@ -3690,11 +5066,11 @@ def test_create_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_vpc_flow_logs_config + client._transport.delete_vpc_flow_logs_config ] = mock_rpc request = {} - client.create_vpc_flow_logs_config(request) + client.delete_vpc_flow_logs_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -3703,21 +5079,20 @@ def test_create_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_vpc_flow_logs_config(request) + client.delete_vpc_flow_logs_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_vpc_flow_logs_config_rest_required_fields( - request_type=vpc_flow_logs.CreateVpcFlowLogsConfigRequest, +def test_delete_vpc_flow_logs_config_rest_required_fields( + request_type=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, ): transport_class = transports.VpcFlowLogsServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["vpc_flow_logs_config_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3725,35 +5100,24 @@ def test_create_vpc_flow_logs_config_rest_required_fields( ) # verify fields with default values are dropped - assert "vpcFlowLogsConfigId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + ).delete_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "vpcFlowLogsConfigId" in jsonified_request - assert ( - jsonified_request["vpcFlowLogsConfigId"] - == request_init["vpc_flow_logs_config_id"] - ) - jsonified_request["parent"] = "parent_value" - jsonified_request["vpcFlowLogsConfigId"] = "vpc_flow_logs_config_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("vpc_flow_logs_config_id",)) + ).delete_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "vpcFlowLogsConfigId" in jsonified_request - assert jsonified_request["vpcFlowLogsConfigId"] == "vpc_flow_logs_config_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3774,10 +5138,9 @@ def test_create_vpc_flow_logs_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -3788,38 +5151,23 @@ def test_create_vpc_flow_logs_config_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_vpc_flow_logs_config(request) + response = client.delete_vpc_flow_logs_config(request) - expected_params = [ - ( - "vpcFlowLogsConfigId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_vpc_flow_logs_config_rest_unset_required_fields(): +def test_delete_vpc_flow_logs_config_rest_unset_required_fields(): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_vpc_flow_logs_config._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("vpcFlowLogsConfigId",)) - & set( - ( - "parent", - "vpcFlowLogsConfigId", - "vpcFlowLogsConfig", - ) - ) - ) + unset_fields = transport.delete_vpc_flow_logs_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_vpc_flow_logs_config_rest_flattened(): +def test_delete_vpc_flow_logs_config_rest_flattened(): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3831,15 +5179,13 @@ def test_create_vpc_flow_logs_config_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( - name="name_value" - ), - vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -3851,20 +5197,20 @@ def test_create_vpc_flow_logs_config_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_vpc_flow_logs_config(**mock_args) + client.delete_vpc_flow_logs_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/vpcFlowLogsConfigs" + "%s/v1/{name=projects/*/locations/*/vpcFlowLogsConfigs/*}" % client.transport._host, args[1], ) -def test_create_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): +def test_delete_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3873,17 +5219,13 @@ def test_create_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_vpc_flow_logs_config( - vpc_flow_logs.CreateVpcFlowLogsConfigRequest(), - parent="parent_value", - vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( - name="name_value" - ), - vpc_flow_logs_config_id="vpc_flow_logs_config_id_value", + client.delete_vpc_flow_logs_config( + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest(), + name="name_value", ) -def test_update_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): +def test_query_org_vpc_flow_logs_configs_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3898,7 +5240,7 @@ def test_update_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_vpc_flow_logs_config + client._transport.query_org_vpc_flow_logs_configs in client._transport._wrapped_methods ) @@ -3908,32 +5250,29 @@ def test_update_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_vpc_flow_logs_config + client._transport.query_org_vpc_flow_logs_configs ] = mock_rpc request = {} - client.update_vpc_flow_logs_config(request) + client.query_org_vpc_flow_logs_configs(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_vpc_flow_logs_config(request) + client.query_org_vpc_flow_logs_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_vpc_flow_logs_config_rest_required_fields( - request_type=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, +def test_query_org_vpc_flow_logs_configs_rest_required_fields( + request_type=vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, ): transport_class = transports.VpcFlowLogsServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -3944,19 +5283,29 @@ def test_update_vpc_flow_logs_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + ).query_org_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + ).query_org_vpc_flow_logs_configs._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3965,7 +5314,7 @@ def test_update_vpc_flow_logs_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3977,111 +5326,120 @@ def test_update_vpc_flow_logs_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_vpc_flow_logs_config(request) + response = client.query_org_vpc_flow_logs_configs(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_vpc_flow_logs_config_rest_unset_required_fields(): +def test_query_org_vpc_flow_logs_configs_rest_unset_required_fields(): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_vpc_flow_logs_config._get_unset_required_fields({}) + unset_fields = transport.query_org_vpc_flow_logs_configs._get_unset_required_fields( + {} + ) assert set(unset_fields) == ( - set(("updateMask",)) - & set( + set( ( - "updateMask", - "vpcFlowLogsConfig", + "filter", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_update_vpc_flow_logs_config_rest_flattened(): +def test_query_org_vpc_flow_logs_configs_rest_pager(transport: str = "rest"): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "vpc_flow_logs_config": { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( - name="name_value" + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + vpc_flow_logs_configs=[ + vpc_flow_logs_config.VpcFlowLogsConfig(), + vpc_flow_logs_config.VpcFlowLogsConfig(), + ], ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_vpc_flow_logs_config(**mock_args) + # Two responses for two calls + response = response + response - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{vpc_flow_logs_config.name=projects/*/locations/*/vpcFlowLogsConfigs/*}" - % client.transport._host, - args[1], + # Wrap the values into proper Response objs + response = tuple( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.to_json(x) + for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_update_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): - client = VpcFlowLogsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + pager = client.query_org_vpc_flow_logs_configs(request=sample_request) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_vpc_flow_logs_config( - vpc_flow_logs.UpdateVpcFlowLogsConfigRequest(), - vpc_flow_logs_config=gcn_vpc_flow_logs_config.VpcFlowLogsConfig( - name="name_value" - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.VpcFlowLogsConfig) for i in results + ) + + pages = list( + client.query_org_vpc_flow_logs_configs(request=sample_request).pages ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_delete_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): +def test_show_effective_flow_logs_configs_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4096,7 +5454,7 @@ def test_delete_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_vpc_flow_logs_config + client._transport.show_effective_flow_logs_configs in client._transport._wrapped_methods ) @@ -4106,33 +5464,30 @@ def test_delete_vpc_flow_logs_config_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_vpc_flow_logs_config + client._transport.show_effective_flow_logs_configs ] = mock_rpc request = {} - client.delete_vpc_flow_logs_config(request) + client.show_effective_flow_logs_configs(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_vpc_flow_logs_config(request) + client.show_effective_flow_logs_configs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_vpc_flow_logs_config_rest_required_fields( - request_type=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, +def test_show_effective_flow_logs_configs_rest_required_fields( + request_type=vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, ): transport_class = transports.VpcFlowLogsServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["resource"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -4140,24 +5495,39 @@ def test_delete_vpc_flow_logs_config_rest_required_fields( ) # verify fields with default values are dropped + assert "resource" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + ).show_effective_flow_logs_configs._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "resource" in jsonified_request + assert jsonified_request["resource"] == request_init["resource"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["resource"] = "resource_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).delete_vpc_flow_logs_config._get_unset_required_fields(jsonified_request) + ).show_effective_flow_logs_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "resource", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4166,7 +5536,7 @@ def test_delete_vpc_flow_logs_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -4178,91 +5548,130 @@ def test_delete_vpc_flow_logs_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_vpc_flow_logs_config(request) + response = client.show_effective_flow_logs_configs(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "resource", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_vpc_flow_logs_config_rest_unset_required_fields(): +def test_show_effective_flow_logs_configs_rest_unset_required_fields(): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_vpc_flow_logs_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = ( + transport.show_effective_flow_logs_configs._get_unset_required_fields({}) + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "resource", + ) + ) + & set( + ( + "parent", + "resource", + ) + ) + ) -def test_delete_vpc_flow_logs_config_rest_flattened(): +def test_show_effective_flow_logs_configs_rest_pager(transport: str = "rest"): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" - } + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="abc", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[], + next_page_token="def", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + next_page_token="ghi", + ), + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + effective_flow_logs_configs=[ + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + vpc_flow_logs_config.EffectiveVpcFlowLogsConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Wrap the values into proper Response objs + response = tuple( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.to_json(x) + for x in response ) - mock_args.update(sample_request) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + sample_request = {"parent": "projects/sample1/locations/sample2"} - client.delete_vpc_flow_logs_config(**mock_args) + pager = client.show_effective_flow_logs_configs(request=sample_request) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/vpcFlowLogsConfigs/*}" - % client.transport._host, - args[1], + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, vpc_flow_logs_config.EffectiveVpcFlowLogsConfig) + for i in results ) - -def test_delete_vpc_flow_logs_config_rest_flattened_error(transport: str = "rest"): - client = VpcFlowLogsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_vpc_flow_logs_config( - vpc_flow_logs.DeleteVpcFlowLogsConfigRequest(), - name="name_value", + pages = list( + client.show_effective_flow_logs_configs(request=sample_request).pages ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_credentials_transport_error(): @@ -4486,6 +5895,52 @@ def test_delete_vpc_flow_logs_config_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_org_vpc_flow_logs_configs_empty_call_grpc(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + call.return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse() + client.query_org_vpc_flow_logs_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_show_effective_flow_logs_configs_empty_call_grpc(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + call.return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse() + client.show_effective_flow_logs_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = VpcFlowLogsServiceAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -4554,15 +6009,70 @@ async def test_get_vpc_flow_logs_config_empty_call_grpc_asyncio(): metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, metadata_fields=["metadata_fields_value"], filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, ) ) - await client.get_vpc_flow_logs_config(request=None) + await client.get_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_vpc_flow_logs_config_empty_call_grpc_asyncio(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_vpc_flow_logs_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_vpc_flow_logs_config_empty_call_grpc_asyncio(): + client = VpcFlowLogsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_vpc_flow_logs_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_vpc_flow_logs_config(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + request_msg = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() assert args[0] == request_msg @@ -4570,7 +6080,7 @@ async def test_get_vpc_flow_logs_config_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_create_vpc_flow_logs_config_empty_call_grpc_asyncio(): +async def test_delete_vpc_flow_logs_config_empty_call_grpc_asyncio(): client = VpcFlowLogsServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", @@ -4578,18 +6088,18 @@ async def test_create_vpc_flow_logs_config_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_vpc_flow_logs_config), "__call__" + type(client.transport.delete_vpc_flow_logs_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - await client.create_vpc_flow_logs_config(request=None) + await client.delete_vpc_flow_logs_config(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + request_msg = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() assert args[0] == request_msg @@ -4597,7 +6107,7 @@ async def test_create_vpc_flow_logs_config_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_update_vpc_flow_logs_config_empty_call_grpc_asyncio(): +async def test_query_org_vpc_flow_logs_configs_empty_call_grpc_asyncio(): client = VpcFlowLogsServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", @@ -4605,18 +6115,21 @@ async def test_update_vpc_flow_logs_config_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.update_vpc_flow_logs_config), "__call__" + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - await client.update_vpc_flow_logs_config(request=None) + await client.query_org_vpc_flow_logs_configs(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + request_msg = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() assert args[0] == request_msg @@ -4624,7 +6137,7 @@ async def test_update_vpc_flow_logs_config_empty_call_grpc_asyncio(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio -async def test_delete_vpc_flow_logs_config_empty_call_grpc_asyncio(): +async def test_show_effective_flow_logs_configs_empty_call_grpc_asyncio(): client = VpcFlowLogsServiceAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", @@ -4632,18 +6145,21 @@ async def test_delete_vpc_flow_logs_config_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.delete_vpc_flow_logs_config), "__call__" + type(client.transport.show_effective_flow_logs_configs), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - await client.delete_vpc_flow_logs_config(request=None) + await client.show_effective_flow_logs_configs(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + request_msg = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() assert args[0] == request_msg @@ -4829,64 +6345,286 @@ def test_get_vpc_flow_logs_config_rest_call_success(request_type): credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" - } + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = vpc_flow_logs_config.VpcFlowLogsConfig( + name="name_value", + description="description_value", + state=vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED, + aggregation_interval=vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC, + flow_sampling=0.1394, + metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, + metadata_fields=["metadata_fields_value"], + filter_expr="filter_expr_value", + cross_project_metadata=vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED, + target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, + network="network_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_vpc_flow_logs_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, vpc_flow_logs_config.VpcFlowLogsConfig) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED + assert ( + response.aggregation_interval + == vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC + ) + assert math.isclose(response.flow_sampling, 0.1394, rel_tol=1e-6) + assert ( + response.metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA + ) + assert response.metadata_fields == ["metadata_fields_value"] + assert response.filter_expr == "filter_expr_value" + assert ( + response.cross_project_metadata + == vpc_flow_logs_config.VpcFlowLogsConfig.CrossProjectMetadata.CROSS_PROJECT_METADATA_ENABLED + ) + assert ( + response.target_resource_state + == vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_vpc_flow_logs_config_rest_interceptors(null_interceptor): + transport = transports.VpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VpcFlowLogsServiceRestInterceptor(), + ) + client = VpcFlowLogsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.VpcFlowLogsServiceRestInterceptor, "post_get_vpc_flow_logs_config" + ) as post, mock.patch.object( + transports.VpcFlowLogsServiceRestInterceptor, + "post_get_vpc_flow_logs_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.VpcFlowLogsServiceRestInterceptor, "pre_get_vpc_flow_logs_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = vpc_flow_logs.GetVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.GetVpcFlowLogsConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 
200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = vpc_flow_logs_config.VpcFlowLogsConfig.to_json( + vpc_flow_logs_config.VpcFlowLogsConfig() + ) + req.return_value.content = return_value + + request = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = vpc_flow_logs_config.VpcFlowLogsConfig() + post_with_metadata.return_value = ( + vpc_flow_logs_config.VpcFlowLogsConfig(), + metadata, + ) + + client.get_vpc_flow_logs_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_vpc_flow_logs_config_rest_bad_request( + request_type=vpc_flow_logs.CreateVpcFlowLogsConfigRequest, +): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_vpc_flow_logs_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.CreateVpcFlowLogsConfigRequest, + dict, + ], +) +def test_create_vpc_flow_logs_config_rest_call_success(request_type): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["vpc_flow_logs_config"] = { + "name": "name_value", + "description": "description_value", + "state": 1, + "aggregation_interval": 1, + "flow_sampling": 0.1394, + "metadata": 1, + "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], + "filter_expr": "filter_expr_value", + "cross_project_metadata": 1, + "target_resource_state": 1, + "network": "network_value", + "subnet": "subnet_value", + "interconnect_attachment": "interconnect_attachment_value", + "vpn_tunnel": "vpn_tunnel_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = vpc_flow_logs.CreateVpcFlowLogsConfigRequest.meta.fields[ + "vpc_flow_logs_config" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "vpc_flow_logs_config" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpc_flow_logs_config"][field])): + del request_init["vpc_flow_logs_config"][field][i][subfield] + else: + del request_init["vpc_flow_logs_config"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = vpc_flow_logs_config.VpcFlowLogsConfig( - name="name_value", - description="description_value", - state=vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED, - aggregation_interval=vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC, - flow_sampling=0.1394, - metadata=vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA, - metadata_fields=["metadata_fields_value"], - filter_expr="filter_expr_value", - target_resource_state=vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS, - interconnect_attachment="interconnect_attachment_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = vpc_flow_logs_config.VpcFlowLogsConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_vpc_flow_logs_config(request) + response = client.create_vpc_flow_logs_config(request) # Establish that the response is the type that we expect. - assert isinstance(response, vpc_flow_logs_config.VpcFlowLogsConfig) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == vpc_flow_logs_config.VpcFlowLogsConfig.State.ENABLED - assert ( - response.aggregation_interval - == vpc_flow_logs_config.VpcFlowLogsConfig.AggregationInterval.INTERVAL_5_SEC - ) - assert math.isclose(response.flow_sampling, 0.1394, rel_tol=1e-6) - assert ( - response.metadata - == vpc_flow_logs_config.VpcFlowLogsConfig.Metadata.INCLUDE_ALL_METADATA - ) - assert response.metadata_fields == ["metadata_fields_value"] - assert response.filter_expr == "filter_expr_value" - assert ( - response.target_resource_state - == vpc_flow_logs_config.VpcFlowLogsConfig.TargetResourceState.TARGET_RESOURCE_EXISTS - ) + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_vpc_flow_logs_config_rest_interceptors(null_interceptor): +def test_create_vpc_flow_logs_config_rest_interceptors(null_interceptor): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -4900,18 +6638,20 @@ def test_get_vpc_flow_logs_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "post_get_vpc_flow_logs_config" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VpcFlowLogsServiceRestInterceptor, "post_create_vpc_flow_logs_config" ) as post, mock.patch.object( transports.VpcFlowLogsServiceRestInterceptor, - "post_get_vpc_flow_logs_config_with_metadata", + "post_create_vpc_flow_logs_config_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "pre_get_vpc_flow_logs_config" + transports.VpcFlowLogsServiceRestInterceptor, "pre_create_vpc_flow_logs_config" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = vpc_flow_logs.GetVpcFlowLogsConfigRequest.pb( - vpc_flow_logs.GetVpcFlowLogsConfigRequest() + pb_message = 
vpc_flow_logs.CreateVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.CreateVpcFlowLogsConfigRequest() ) transcode.return_value = { "method": "post", @@ -4923,24 +6663,19 @@ def test_get_vpc_flow_logs_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = vpc_flow_logs_config.VpcFlowLogsConfig.to_json( - vpc_flow_logs_config.VpcFlowLogsConfig() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = vpc_flow_logs.GetVpcFlowLogsConfigRequest() + request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = vpc_flow_logs_config.VpcFlowLogsConfig() - post_with_metadata.return_value = ( - vpc_flow_logs_config.VpcFlowLogsConfig(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_vpc_flow_logs_config( + client.create_vpc_flow_logs_config( request, metadata=[ ("key", "val"), @@ -4953,14 +6688,18 @@ def test_get_vpc_flow_logs_config_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_vpc_flow_logs_config_rest_bad_request( - request_type=vpc_flow_logs.CreateVpcFlowLogsConfigRequest, +def test_update_vpc_flow_logs_config_rest_bad_request( + request_type=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, ): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "vpc_flow_logs_config": { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4975,25 +6714,29 @@ def test_create_vpc_flow_logs_config_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_vpc_flow_logs_config(request) + client.update_vpc_flow_logs_config(request) @pytest.mark.parametrize( "request_type", [ - vpc_flow_logs.CreateVpcFlowLogsConfigRequest, + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, dict, ], ) -def test_create_vpc_flow_logs_config_rest_call_success(request_type): +def test_update_vpc_flow_logs_config_rest_call_success(request_type): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "vpc_flow_logs_config": { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + } request_init["vpc_flow_logs_config"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3", "description": "description_value", "state": 1, "aggregation_interval": 1, @@ -5001,7 +6744,10 @@ def test_create_vpc_flow_logs_config_rest_call_success(request_type): "metadata": 1, "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], "filter_expr": "filter_expr_value", + "cross_project_metadata": 1, "target_resource_state": 1, + "network": "network_value", + "subnet": "subnet_value", "interconnect_attachment": "interconnect_attachment_value", "vpn_tunnel": "vpn_tunnel_value", "labels": {}, @@ -5013,7 +6759,7 @@ def test_create_vpc_flow_logs_config_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = vpc_flow_logs.CreateVpcFlowLogsConfigRequest.meta.fields[ + test_field = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.meta.fields[ "vpc_flow_logs_config" ] @@ -5067,18 +6813,146 @@ def get_message_fields(field): } ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["vpc_flow_logs_config"][field])): - del request_init["vpc_flow_logs_config"][field][i][subfield] - else: - del request_init["vpc_flow_logs_config"][field][subfield] + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["vpc_flow_logs_config"][field])): + del request_init["vpc_flow_logs_config"][field][i][subfield] + else: + del request_init["vpc_flow_logs_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_vpc_flow_logs_config(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_vpc_flow_logs_config_rest_interceptors(null_interceptor): + transport = transports.VpcFlowLogsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.VpcFlowLogsServiceRestInterceptor(), + ) + client = VpcFlowLogsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.VpcFlowLogsServiceRestInterceptor, "post_update_vpc_flow_logs_config" + ) as post, mock.patch.object( + transports.VpcFlowLogsServiceRestInterceptor, + "post_update_vpc_flow_logs_config_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.VpcFlowLogsServiceRestInterceptor, "pre_update_vpc_flow_logs_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_vpc_flow_logs_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_vpc_flow_logs_config_rest_bad_request( + request_type=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, +): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_vpc_flow_logs_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, + dict, + ], +) +def test_delete_vpc_flow_logs_config_rest_call_success(request_type): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5093,14 +6967,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_vpc_flow_logs_config(request) + response = client.delete_vpc_flow_logs_config(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_vpc_flow_logs_config_rest_interceptors(null_interceptor): +def test_delete_vpc_flow_logs_config_rest_interceptors(null_interceptor): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5116,18 +6990,18 @@ def test_create_vpc_flow_logs_config_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "post_create_vpc_flow_logs_config" + transports.VpcFlowLogsServiceRestInterceptor, "post_delete_vpc_flow_logs_config" ) as post, mock.patch.object( transports.VpcFlowLogsServiceRestInterceptor, - "post_create_vpc_flow_logs_config_with_metadata", + "post_delete_vpc_flow_logs_config_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "pre_create_vpc_flow_logs_config" + transports.VpcFlowLogsServiceRestInterceptor, "pre_delete_vpc_flow_logs_config" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = vpc_flow_logs.CreateVpcFlowLogsConfigRequest.pb( - vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + pb_message = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest.pb( + vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() ) transcode.return_value = { "method": "post", @@ -5142,7 +7016,7 @@ def test_create_vpc_flow_logs_config_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = vpc_flow_logs.CreateVpcFlowLogsConfigRequest() + request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -5151,7 +7025,7 @@ def test_create_vpc_flow_logs_config_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - 
client.create_vpc_flow_logs_config( + client.delete_vpc_flow_logs_config( request, metadata=[ ("key", "val"), @@ -5164,18 +7038,14 @@ def test_create_vpc_flow_logs_config_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_vpc_flow_logs_config_rest_bad_request( - request_type=vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, +def test_query_org_vpc_flow_logs_configs_rest_bad_request( + request_type=vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, ): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "vpc_flow_logs_config": { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5190,136 +7060,53 @@ def test_update_vpc_flow_logs_config_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_vpc_flow_logs_config(request) + client.query_org_vpc_flow_logs_configs(request) @pytest.mark.parametrize( "request_type", [ - vpc_flow_logs.UpdateVpcFlowLogsConfigRequest, + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest, dict, ], ) -def test_update_vpc_flow_logs_config_rest_call_success(request_type): +def test_query_org_vpc_flow_logs_configs_rest_call_success(request_type): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "vpc_flow_logs_config": { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" - } - } - request_init["vpc_flow_logs_config"] = { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3", - "description": "description_value", - "state": 1, - "aggregation_interval": 1, - "flow_sampling": 0.1394, - "metadata": 1, - "metadata_fields": ["metadata_fields_value1", "metadata_fields_value2"], - "filter_expr": "filter_expr_value", - "target_resource_state": 1, - "interconnect_attachment": "interconnect_attachment_value", - "vpn_tunnel": "vpn_tunnel_value", - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.meta.fields[ - "vpc_flow_logs_config" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "vpc_flow_logs_config" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["vpc_flow_logs_config"][field])): - del request_init["vpc_flow_logs_config"][field][i][subfield] - else: - del request_init["vpc_flow_logs_config"][field][subfield] + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_vpc_flow_logs_config(request) + response = client.query_org_vpc_flow_logs_configs(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.QueryOrgVpcFlowLogsConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_vpc_flow_logs_config_rest_interceptors(null_interceptor): +def test_query_org_vpc_flow_logs_configs_rest_interceptors(null_interceptor): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5333,20 +7120,20 @@ def test_update_vpc_flow_logs_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "post_update_vpc_flow_logs_config" + transports.VpcFlowLogsServiceRestInterceptor, + "post_query_org_vpc_flow_logs_configs", ) as post, mock.patch.object( transports.VpcFlowLogsServiceRestInterceptor, - "post_update_vpc_flow_logs_config_with_metadata", + "post_query_org_vpc_flow_logs_configs_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "pre_update_vpc_flow_logs_config" + transports.VpcFlowLogsServiceRestInterceptor, + "pre_query_org_vpc_flow_logs_configs", ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest.pb( - vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + pb_message = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest.pb( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() ) transcode.return_value = { "method": "post", @@ -5358,19 +7145,24 @@ def test_update_vpc_flow_logs_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse.to_json( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse() + ) req.return_value.content = return_value - request = vpc_flow_logs.UpdateVpcFlowLogsConfigRequest() + request = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse() + post_with_metadata.return_value = ( + vpc_flow_logs.QueryOrgVpcFlowLogsConfigsResponse(), + metadata, + ) - client.update_vpc_flow_logs_config( + client.query_org_vpc_flow_logs_configs( request, metadata=[ ("key", "val"), @@ -5383,16 +7175,14 @@ def test_update_vpc_flow_logs_config_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_vpc_flow_logs_config_rest_bad_request( - request_type=vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, +def test_show_effective_flow_logs_configs_rest_bad_request( + request_type=vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, ): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" - } + 
request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5407,47 +7197,55 @@ def test_delete_vpc_flow_logs_config_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_vpc_flow_logs_config(request) + client.show_effective_flow_logs_configs(request) @pytest.mark.parametrize( "request_type", [ - vpc_flow_logs.DeleteVpcFlowLogsConfigRequest, + vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest, dict, ], ) -def test_delete_vpc_flow_logs_config_rest_call_success(request_type): +def test_show_effective_flow_logs_configs_rest_call_success(request_type): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/vpcFlowLogsConfigs/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_vpc_flow_logs_config(request) + response = client.show_effective_flow_logs_configs(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ShowEffectiveFlowLogsConfigsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_vpc_flow_logs_config_rest_interceptors(null_interceptor): +def test_show_effective_flow_logs_configs_rest_interceptors(null_interceptor): transport = transports.VpcFlowLogsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5461,20 +7259,20 @@ def test_delete_vpc_flow_logs_config_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "post_delete_vpc_flow_logs_config" + transports.VpcFlowLogsServiceRestInterceptor, + "post_show_effective_flow_logs_configs", ) as post, mock.patch.object( transports.VpcFlowLogsServiceRestInterceptor, - "post_delete_vpc_flow_logs_config_with_metadata", + "post_show_effective_flow_logs_configs_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.VpcFlowLogsServiceRestInterceptor, "pre_delete_vpc_flow_logs_config" + transports.VpcFlowLogsServiceRestInterceptor, + "pre_show_effective_flow_logs_configs", ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest.pb( - vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + pb_message = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest.pb( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() ) transcode.return_value = { "method": "post", @@ -5486,19 +7284,24 @@ def test_delete_vpc_flow_logs_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse.to_json( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse() + ) req.return_value.content = return_value - request = vpc_flow_logs.DeleteVpcFlowLogsConfigRequest() + request = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse() + post_with_metadata.return_value = ( + vpc_flow_logs.ShowEffectiveFlowLogsConfigsResponse(), + metadata, + ) - client.delete_vpc_flow_logs_config( + client.show_effective_flow_logs_configs( request, metadata=[ ("key", "val"), @@ -6191,6 +7994,50 @@ def test_delete_vpc_flow_logs_config_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_org_vpc_flow_logs_configs_empty_call_rest(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_org_vpc_flow_logs_configs), "__call__" + ) as call: + client.query_org_vpc_flow_logs_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.QueryOrgVpcFlowLogsConfigsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_show_effective_flow_logs_configs_empty_call_rest(): + client = VpcFlowLogsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.show_effective_flow_logs_configs), "__call__" + ) as call: + client.show_effective_flow_logs_configs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = vpc_flow_logs.ShowEffectiveFlowLogsConfigsRequest() + + assert args[0] == request_msg + + def test_vpc_flow_logs_service_rest_lro_client(): client = VpcFlowLogsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6246,6 +8093,8 @@ def test_vpc_flow_logs_service_base_transport(): "create_vpc_flow_logs_config", "update_vpc_flow_logs_config", "delete_vpc_flow_logs_config", + "query_org_vpc_flow_logs_configs", + "show_effective_flow_logs_configs", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -6535,6 +8384,12 @@ def test_vpc_flow_logs_service_client_transport_session_collision(transport_name session1 = client1.transport.delete_vpc_flow_logs_config._session session2 = client2.transport.delete_vpc_flow_logs_config._session assert session1 != session2 + session1 = client1.transport.query_org_vpc_flow_logs_configs._session + session2 = client2.transport.query_org_vpc_flow_logs_configs._session + assert session1 != session2 + session1 = client1.transport.show_effective_flow_logs_configs._session + session2 = client2.transport.show_effective_flow_logs_configs._session + assert session1 != session2 def test_vpc_flow_logs_service_grpc_transport_channel(): diff --git a/packages/google-cloud-network-services/google/cloud/network_services/__init__.py b/packages/google-cloud-network-services/google/cloud/network_services/__init__.py index 6c58c0d57e41..2ca85b41a070 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services/__init__.py +++ b/packages/google-cloud-network-services/google/cloud/network_services/__init__.py @@ -39,26 +39,33 @@ from google.cloud.network_services_v1.types.dep import ( AuthzExtension, CreateAuthzExtensionRequest, + CreateLbEdgeExtensionRequest, CreateLbRouteExtensionRequest, CreateLbTrafficExtensionRequest, DeleteAuthzExtensionRequest, + DeleteLbEdgeExtensionRequest, DeleteLbRouteExtensionRequest, DeleteLbTrafficExtensionRequest, EventType, ExtensionChain, GetAuthzExtensionRequest, + GetLbEdgeExtensionRequest, GetLbRouteExtensionRequest, GetLbTrafficExtensionRequest, + LbEdgeExtension, LbRouteExtension, LbTrafficExtension, ListAuthzExtensionsRequest, ListAuthzExtensionsResponse, + ListLbEdgeExtensionsRequest, + ListLbEdgeExtensionsResponse, ListLbRouteExtensionsRequest, ListLbRouteExtensionsResponse, ListLbTrafficExtensionsRequest, ListLbTrafficExtensionsResponse, LoadBalancingScheme, UpdateAuthzExtensionRequest, + UpdateLbEdgeExtensionRequest, UpdateLbRouteExtensionRequest, 
UpdateLbTrafficExtensionRequest, WireFormat, @@ -182,24 +189,31 @@ "EnvoyHeaders", "AuthzExtension", "CreateAuthzExtensionRequest", + "CreateLbEdgeExtensionRequest", "CreateLbRouteExtensionRequest", "CreateLbTrafficExtensionRequest", "DeleteAuthzExtensionRequest", + "DeleteLbEdgeExtensionRequest", "DeleteLbRouteExtensionRequest", "DeleteLbTrafficExtensionRequest", "ExtensionChain", "GetAuthzExtensionRequest", + "GetLbEdgeExtensionRequest", "GetLbRouteExtensionRequest", "GetLbTrafficExtensionRequest", + "LbEdgeExtension", "LbRouteExtension", "LbTrafficExtension", "ListAuthzExtensionsRequest", "ListAuthzExtensionsResponse", + "ListLbEdgeExtensionsRequest", + "ListLbEdgeExtensionsResponse", "ListLbRouteExtensionsRequest", "ListLbRouteExtensionsResponse", "ListLbTrafficExtensionsRequest", "ListLbTrafficExtensionsResponse", "UpdateAuthzExtensionRequest", + "UpdateLbEdgeExtensionRequest", "UpdateLbRouteExtensionRequest", "UpdateLbTrafficExtensionRequest", "EventType", diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/__init__.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/__init__.py index 6f495b78a329..cb27a8b94c02 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/__init__.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/__init__.py @@ -29,26 +29,33 @@ from .types.dep import ( AuthzExtension, CreateAuthzExtensionRequest, + CreateLbEdgeExtensionRequest, CreateLbRouteExtensionRequest, CreateLbTrafficExtensionRequest, DeleteAuthzExtensionRequest, + DeleteLbEdgeExtensionRequest, DeleteLbRouteExtensionRequest, DeleteLbTrafficExtensionRequest, EventType, ExtensionChain, GetAuthzExtensionRequest, + GetLbEdgeExtensionRequest, GetLbRouteExtensionRequest, GetLbTrafficExtensionRequest, + LbEdgeExtension, LbRouteExtension, LbTrafficExtension, ListAuthzExtensionsRequest, ListAuthzExtensionsResponse, + ListLbEdgeExtensionsRequest, + ListLbEdgeExtensionsResponse, ListLbRouteExtensionsRequest, ListLbRouteExtensionsResponse, ListLbTrafficExtensionsRequest, ListLbTrafficExtensionsResponse, LoadBalancingScheme, UpdateAuthzExtensionRequest, + UpdateLbEdgeExtensionRequest, UpdateLbRouteExtensionRequest, UpdateLbTrafficExtensionRequest, WireFormat, @@ -170,6 +177,7 @@ "CreateGatewayRequest", "CreateGrpcRouteRequest", "CreateHttpRouteRequest", + "CreateLbEdgeExtensionRequest", "CreateLbRouteExtensionRequest", "CreateLbTrafficExtensionRequest", "CreateMeshRequest", @@ -184,6 +192,7 @@ "DeleteGatewayRequest", "DeleteGrpcRouteRequest", "DeleteHttpRouteRequest", + "DeleteLbEdgeExtensionRequest", "DeleteLbRouteExtensionRequest", "DeleteLbTrafficExtensionRequest", "DeleteMeshRequest", @@ -207,6 +216,7 @@ "GetGatewayRouteViewRequest", "GetGrpcRouteRequest", "GetHttpRouteRequest", + "GetLbEdgeExtensionRequest", "GetLbRouteExtensionRequest", "GetLbTrafficExtensionRequest", "GetMeshRequest", @@ -219,6 +229,7 @@ "GetWasmPluginVersionRequest", "GrpcRoute", "HttpRoute", + "LbEdgeExtension", "LbRouteExtension", "LbTrafficExtension", "ListAuthzExtensionsRequest", @@ -233,6 +244,8 @@ "ListGrpcRoutesResponse", "ListHttpRoutesRequest", "ListHttpRoutesResponse", + "ListLbEdgeExtensionsRequest", + "ListLbEdgeExtensionsResponse", "ListLbRouteExtensionsRequest", "ListLbRouteExtensionsResponse", "ListLbTrafficExtensionsRequest", @@ -268,6 +281,7 @@ "UpdateGatewayRequest", "UpdateGrpcRouteRequest", "UpdateHttpRouteRequest", + "UpdateLbEdgeExtensionRequest", "UpdateLbRouteExtensionRequest", 
"UpdateLbTrafficExtensionRequest", "UpdateMeshRequest", diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_metadata.json b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_metadata.json index 2c6a6bb3a3a0..0a3314f7de1d 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_metadata.json +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/gapic_metadata.json @@ -15,6 +15,11 @@ "create_authz_extension" ] }, + "CreateLbEdgeExtension": { + "methods": [ + "create_lb_edge_extension" + ] + }, "CreateLbRouteExtension": { "methods": [ "create_lb_route_extension" @@ -30,6 +35,11 @@ "delete_authz_extension" ] }, + "DeleteLbEdgeExtension": { + "methods": [ + "delete_lb_edge_extension" + ] + }, "DeleteLbRouteExtension": { "methods": [ "delete_lb_route_extension" @@ -45,6 +55,11 @@ "get_authz_extension" ] }, + "GetLbEdgeExtension": { + "methods": [ + "get_lb_edge_extension" + ] + }, "GetLbRouteExtension": { "methods": [ "get_lb_route_extension" @@ -60,6 +75,11 @@ "list_authz_extensions" ] }, + "ListLbEdgeExtensions": { + "methods": [ + "list_lb_edge_extensions" + ] + }, "ListLbRouteExtensions": { "methods": [ "list_lb_route_extensions" @@ -75,6 +95,11 @@ "update_authz_extension" ] }, + "UpdateLbEdgeExtension": { + "methods": [ + "update_lb_edge_extension" + ] + }, "UpdateLbRouteExtension": { "methods": [ "update_lb_route_extension" @@ -95,6 +120,11 @@ "create_authz_extension" ] }, + "CreateLbEdgeExtension": { + "methods": [ + "create_lb_edge_extension" + ] + }, "CreateLbRouteExtension": { "methods": [ "create_lb_route_extension" @@ -110,6 +140,11 @@ "delete_authz_extension" ] }, + "DeleteLbEdgeExtension": { + "methods": [ + "delete_lb_edge_extension" + ] + }, "DeleteLbRouteExtension": { "methods": [ "delete_lb_route_extension" @@ -125,6 +160,11 @@ "get_authz_extension" ] }, + "GetLbEdgeExtension": { + "methods": [ + "get_lb_edge_extension" + ] + }, "GetLbRouteExtension": { "methods": [ "get_lb_route_extension" @@ -140,6 +180,11 @@ "list_authz_extensions" ] }, + "ListLbEdgeExtensions": { + "methods": [ + "list_lb_edge_extensions" + ] + }, "ListLbRouteExtensions": { "methods": [ "list_lb_route_extensions" @@ -155,6 +200,11 @@ "update_authz_extension" ] }, + "UpdateLbEdgeExtension": { + "methods": [ + "update_lb_edge_extension" + ] + }, "UpdateLbRouteExtension": { "methods": [ "update_lb_route_extension" @@ -175,6 +225,11 @@ "create_authz_extension" ] }, + "CreateLbEdgeExtension": { + "methods": [ + "create_lb_edge_extension" + ] + }, "CreateLbRouteExtension": { "methods": [ "create_lb_route_extension" @@ -190,6 +245,11 @@ "delete_authz_extension" ] }, + "DeleteLbEdgeExtension": { + "methods": [ + "delete_lb_edge_extension" + ] + }, "DeleteLbRouteExtension": { "methods": [ "delete_lb_route_extension" @@ -205,6 +265,11 @@ "get_authz_extension" ] }, + "GetLbEdgeExtension": { + "methods": [ + "get_lb_edge_extension" + ] + }, "GetLbRouteExtension": { "methods": [ "get_lb_route_extension" @@ -220,6 +285,11 @@ "list_authz_extensions" ] }, + "ListLbEdgeExtensions": { + "methods": [ + "list_lb_edge_extensions" + ] + }, "ListLbRouteExtensions": { "methods": [ "list_lb_route_extensions" @@ -235,6 +305,11 @@ "update_authz_extension" ] }, + "UpdateLbEdgeExtension": { + "methods": [ + "update_lb_edge_extension" + ] + }, "UpdateLbRouteExtension": { "methods": [ "update_lb_route_extension" diff --git 
a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py index 20a5b3444ecd..c6e3e32d31c8 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/async_client.py @@ -89,6 +89,10 @@ class DepServiceAsyncClient: parse_authz_extension_path = staticmethod( DepServiceClient.parse_authz_extension_path ) + lb_edge_extension_path = staticmethod(DepServiceClient.lb_edge_extension_path) + parse_lb_edge_extension_path = staticmethod( + DepServiceClient.parse_lb_edge_extension_path + ) lb_route_extension_path = staticmethod(DepServiceClient.lb_route_extension_path) parse_lb_route_extension_path = staticmethod( DepServiceClient.parse_lb_route_extension_path @@ -1662,6 +1666,685 @@ async def sample_delete_lb_route_extension(): # Done; return the response. return response + async def list_lb_edge_extensions( + self, + request: Optional[Union[dep.ListLbEdgeExtensionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLbEdgeExtensionsAsyncPager: + r"""Lists ``LbEdgeExtension`` resources in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + async def sample_list_lb_edge_extensions(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.ListLbEdgeExtensionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lb_edge_extensions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.network_services_v1.types.ListLbEdgeExtensionsRequest, dict]]): + The request object. Message for requesting list of ``LbEdgeExtension`` + resources. + parent (:class:`str`): + Required. The project and location from which the + ``LbEdgeExtension`` resources are listed. These values + are specified in the following format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.network_services_v1.services.dep_service.pagers.ListLbEdgeExtensionsAsyncPager: + Message for response to listing LbEdgeExtension + resources. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.ListLbEdgeExtensionsRequest): + request = dep.ListLbEdgeExtensionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_lb_edge_extensions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLbEdgeExtensionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_lb_edge_extension( + self, + request: Optional[Union[dep.GetLbEdgeExtensionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dep.LbEdgeExtension: + r"""Gets details of the specified ``LbEdgeExtension`` resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + async def sample_get_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_lb_edge_extension(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_services_v1.types.GetLbEdgeExtensionRequest, dict]]): + The request object. Message for getting a ``LbEdgeExtension`` resource. 
+ name (:class:`str`): + Required. A name of the ``LbEdgeExtension`` resource to + get. Must be in the format + ``projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_services_v1.types.LbEdgeExtension: + LbEdgeExtension is a resource that lets the extension service influence + the selection of backend services and Cloud CDN cache + keys by modifying request headers. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.GetLbEdgeExtensionRequest): + request = dep.GetLbEdgeExtensionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_lb_edge_extension + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_lb_edge_extension( + self, + request: Optional[Union[dep.CreateLbEdgeExtensionRequest, dict]] = None, + *, + parent: Optional[str] = None, + lb_edge_extension: Optional[dep.LbEdgeExtension] = None, + lb_edge_extension_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new ``LbEdgeExtension`` resource in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + async def sample_create_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.CreateLbEdgeExtensionRequest( + parent="parent_value", + lb_edge_extension_id="lb_edge_extension_id_value", + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.create_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_services_v1.types.CreateLbEdgeExtensionRequest, dict]]): + The request object. Message for creating a ``LbEdgeExtension`` resource. + parent (:class:`str`): + Required. The parent resource of the ``LbEdgeExtension`` + resource. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lb_edge_extension (:class:`google.cloud.network_services_v1.types.LbEdgeExtension`): + Required. ``LbEdgeExtension`` resource to be created. + This corresponds to the ``lb_edge_extension`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lb_edge_extension_id (:class:`str`): + Required. User-provided ID of the ``LbEdgeExtension`` + resource to be created. + + This corresponds to the ``lb_edge_extension_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_services_v1.types.LbEdgeExtension` LbEdgeExtension is a resource that lets the extension service influence + the selection of backend services and Cloud CDN cache + keys by modifying request headers. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent, lb_edge_extension, lb_edge_extension_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.CreateLbEdgeExtensionRequest): + request = dep.CreateLbEdgeExtensionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lb_edge_extension is not None: + request.lb_edge_extension = lb_edge_extension + if lb_edge_extension_id is not None: + request.lb_edge_extension_id = lb_edge_extension_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_lb_edge_extension + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + dep.LbEdgeExtension, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_lb_edge_extension( + self, + request: Optional[Union[dep.UpdateLbEdgeExtensionRequest, dict]] = None, + *, + lb_edge_extension: Optional[dep.LbEdgeExtension] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of the specified ``LbEdgeExtension`` + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + async def sample_update_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.UpdateLbEdgeExtensionRequest( + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.update_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_services_v1.types.UpdateLbEdgeExtensionRequest, dict]]): + The request object. Message for updating a ``LbEdgeExtension`` resource. + lb_edge_extension (:class:`google.cloud.network_services_v1.types.LbEdgeExtension`): + Required. ``LbEdgeExtension`` resource being updated. + This corresponds to the ``lb_edge_extension`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Used to specify the fields to be overwritten + in the ``LbEdgeExtension`` resource by the update. The + fields specified in the ``update_mask`` are relative to + the resource, not the full request. A field is + overwritten if it is in the mask. If the user does not + specify a mask, then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_services_v1.types.LbEdgeExtension` LbEdgeExtension is a resource that lets the extension service influence + the selection of backend services and Cloud CDN cache + keys by modifying request headers. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [lb_edge_extension, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.UpdateLbEdgeExtensionRequest): + request = dep.UpdateLbEdgeExtensionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if lb_edge_extension is not None: + request.lb_edge_extension = lb_edge_extension + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_lb_edge_extension + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("lb_edge_extension.name", request.lb_edge_extension.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + dep.LbEdgeExtension, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_lb_edge_extension( + self, + request: Optional[Union[dep.DeleteLbEdgeExtensionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes the specified ``LbEdgeExtension`` resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + async def sample_delete_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.network_services_v1.types.DeleteLbEdgeExtensionRequest, dict]]): + The request object. Message for deleting a ``LbEdgeExtension`` resource. + name (:class:`str`): + Required. The name of the ``LbEdgeExtension`` resource + to delete. Must be in the format + ``projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.DeleteLbEdgeExtensionRequest): + request = dep.DeleteLbEdgeExtensionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_lb_edge_extension + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def list_authz_extensions( self, request: Optional[Union[dep.ListAuthzExtensionsRequest, dict]] = None, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py index 8db8095aad93..25256677d10f 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/client.py @@ -228,6 +228,28 @@ def parse_authz_extension_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def lb_edge_extension_path( + project: str, + location: str, + lb_edge_extension: str, + ) -> str: + """Returns a fully-qualified lb_edge_extension string.""" + return "projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}".format( + project=project, + location=location, + lb_edge_extension=lb_edge_extension, + ) + + @staticmethod + def parse_lb_edge_extension_path(path: str) -> Dict[str, str]: + """Parses a lb_edge_extension path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/lbEdgeExtensions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def lb_route_extension_path( project: str, @@ -2104,6 +2126,670 @@ def sample_delete_lb_route_extension(): # Done; return the response. return response + def list_lb_edge_extensions( + self, + request: Optional[Union[dep.ListLbEdgeExtensionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLbEdgeExtensionsPager: + r"""Lists ``LbEdgeExtension`` resources in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + def sample_list_lb_edge_extensions(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + request = network_services_v1.ListLbEdgeExtensionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lb_edge_extensions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.network_services_v1.types.ListLbEdgeExtensionsRequest, dict]): + The request object. Message for requesting list of ``LbEdgeExtension`` + resources. + parent (str): + Required. The project and location from which the + ``LbEdgeExtension`` resources are listed. These values + are specified in the following format: + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_services_v1.services.dep_service.pagers.ListLbEdgeExtensionsPager: + Message for response to listing LbEdgeExtension + resources. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.ListLbEdgeExtensionsRequest): + request = dep.ListLbEdgeExtensionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_lb_edge_extensions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLbEdgeExtensionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_lb_edge_extension( + self, + request: Optional[Union[dep.GetLbEdgeExtensionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> dep.LbEdgeExtension: + r"""Gets details of the specified ``LbEdgeExtension`` resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + def sample_get_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + request = network_services_v1.GetLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + response = client.get_lb_edge_extension(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_services_v1.types.GetLbEdgeExtensionRequest, dict]): + The request object. Message for getting a ``LbEdgeExtension`` resource. + name (str): + Required. A name of the ``LbEdgeExtension`` resource to + get. Must be in the format + ``projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.network_services_v1.types.LbEdgeExtension: + LbEdgeExtension is a resource that lets the extension service influence + the selection of backend services and Cloud CDN cache + keys by modifying request headers. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.GetLbEdgeExtensionRequest): + request = dep.GetLbEdgeExtensionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_lb_edge_extension] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_lb_edge_extension( + self, + request: Optional[Union[dep.CreateLbEdgeExtensionRequest, dict]] = None, + *, + parent: Optional[str] = None, + lb_edge_extension: Optional[dep.LbEdgeExtension] = None, + lb_edge_extension_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new ``LbEdgeExtension`` resource in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + def sample_create_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.CreateLbEdgeExtensionRequest( + parent="parent_value", + lb_edge_extension_id="lb_edge_extension_id_value", + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.create_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_services_v1.types.CreateLbEdgeExtensionRequest, dict]): + The request object. Message for creating a ``LbEdgeExtension`` resource. + parent (str): + Required. The parent resource of the ``LbEdgeExtension`` + resource. Must be in the format + ``projects/{project}/locations/{location}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lb_edge_extension (google.cloud.network_services_v1.types.LbEdgeExtension): + Required. ``LbEdgeExtension`` resource to be created. + This corresponds to the ``lb_edge_extension`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lb_edge_extension_id (str): + Required. User-provided ID of the ``LbEdgeExtension`` + resource to be created. + + This corresponds to the ``lb_edge_extension_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_services_v1.types.LbEdgeExtension` LbEdgeExtension is a resource that lets the extension service influence + the selection of backend services and Cloud CDN cache + keys by modifying request headers. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, lb_edge_extension, lb_edge_extension_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.CreateLbEdgeExtensionRequest): + request = dep.CreateLbEdgeExtensionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if lb_edge_extension is not None: + request.lb_edge_extension = lb_edge_extension + if lb_edge_extension_id is not None: + request.lb_edge_extension_id = lb_edge_extension_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_lb_edge_extension] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + dep.LbEdgeExtension, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_lb_edge_extension( + self, + request: Optional[Union[dep.UpdateLbEdgeExtensionRequest, dict]] = None, + *, + lb_edge_extension: Optional[dep.LbEdgeExtension] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the parameters of the specified ``LbEdgeExtension`` + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + def sample_update_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.UpdateLbEdgeExtensionRequest( + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.update_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_services_v1.types.UpdateLbEdgeExtensionRequest, dict]): + The request object. Message for updating a ``LbEdgeExtension`` resource. + lb_edge_extension (google.cloud.network_services_v1.types.LbEdgeExtension): + Required. ``LbEdgeExtension`` resource being updated. + This corresponds to the ``lb_edge_extension`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Used to specify the fields to be overwritten + in the ``LbEdgeExtension`` resource by the update. The + fields specified in the ``update_mask`` are relative to + the resource, not the full request. A field is + overwritten if it is in the mask. If the user does not + specify a mask, then all fields are overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.network_services_v1.types.LbEdgeExtension` LbEdgeExtension is a resource that lets the extension service influence + the selection of backend services and Cloud CDN cache + keys by modifying request headers. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [lb_edge_extension, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.UpdateLbEdgeExtensionRequest): + request = dep.UpdateLbEdgeExtensionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if lb_edge_extension is not None: + request.lb_edge_extension = lb_edge_extension + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_lb_edge_extension] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("lb_edge_extension.name", request.lb_edge_extension.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + dep.LbEdgeExtension, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_lb_edge_extension( + self, + request: Optional[Union[dep.DeleteLbEdgeExtensionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes the specified ``LbEdgeExtension`` resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import network_services_v1 + + def sample_delete_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.network_services_v1.types.DeleteLbEdgeExtensionRequest, dict]): + The request object. Message for deleting a ``LbEdgeExtension`` resource. + name (str): + Required. The name of the ``LbEdgeExtension`` resource + to delete. Must be in the format + ``projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, dep.DeleteLbEdgeExtensionRequest): + request = dep.DeleteLbEdgeExtensionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_lb_edge_extension] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=common.OperationMetadata, + ) + + # Done; return the response. + return response + def list_authz_extensions( self, request: Optional[Union[dep.ListAuthzExtensionsRequest, dict]] = None, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/pagers.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/pagers.py index 18758b35c004..ed2135bbb5db 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/pagers.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/pagers.py @@ -353,6 +353,162 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListLbEdgeExtensionsPager: + """A pager for iterating through ``list_lb_edge_extensions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_services_v1.types.ListLbEdgeExtensionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``lb_edge_extensions`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListLbEdgeExtensions`` requests and continue to iterate + through the ``lb_edge_extensions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_services_v1.types.ListLbEdgeExtensionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dep.ListLbEdgeExtensionsResponse], + request: dep.ListLbEdgeExtensionsRequest, + response: dep.ListLbEdgeExtensionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_services_v1.types.ListLbEdgeExtensionsRequest): + The initial request object. + response (google.cloud.network_services_v1.types.ListLbEdgeExtensionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dep.ListLbEdgeExtensionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[dep.ListLbEdgeExtensionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[dep.LbEdgeExtension]: + for page in self.pages: + yield from page.lb_edge_extensions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListLbEdgeExtensionsAsyncPager: + """A pager for iterating through ``list_lb_edge_extensions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.network_services_v1.types.ListLbEdgeExtensionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``lb_edge_extensions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLbEdgeExtensions`` requests and continue to iterate + through the ``lb_edge_extensions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.network_services_v1.types.ListLbEdgeExtensionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[dep.ListLbEdgeExtensionsResponse]], + request: dep.ListLbEdgeExtensionsRequest, + response: dep.ListLbEdgeExtensionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.network_services_v1.types.ListLbEdgeExtensionsRequest): + The initial request object. + response (google.cloud.network_services_v1.types.ListLbEdgeExtensionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = dep.ListLbEdgeExtensionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[dep.ListLbEdgeExtensionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[dep.LbEdgeExtension]: + async def async_generator(): + async for page in self.pages: + for response in page.lb_edge_extensions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListAuthzExtensionsPager: """A pager for iterating through ``list_authz_extensions`` requests. 
diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/base.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/base.py index b6e85d043918..174340ef00c2 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/base.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/base.py @@ -187,6 +187,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_lb_edge_extensions: gapic_v1.method.wrap_method( + self.list_lb_edge_extensions, + default_timeout=None, + client_info=client_info, + ), + self.get_lb_edge_extension: gapic_v1.method.wrap_method( + self.get_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), + self.create_lb_edge_extension: gapic_v1.method.wrap_method( + self.create_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), + self.update_lb_edge_extension: gapic_v1.method.wrap_method( + self.update_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), + self.delete_lb_edge_extension: gapic_v1.method.wrap_method( + self.delete_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), self.list_authz_extensions: gapic_v1.method.wrap_method( self.list_authz_extensions, default_timeout=None, @@ -369,6 +394,54 @@ def delete_lb_route_extension( ]: raise NotImplementedError() + @property + def list_lb_edge_extensions( + self, + ) -> Callable[ + [dep.ListLbEdgeExtensionsRequest], + Union[ + dep.ListLbEdgeExtensionsResponse, + Awaitable[dep.ListLbEdgeExtensionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_lb_edge_extension( + self, + ) -> Callable[ + [dep.GetLbEdgeExtensionRequest], + Union[dep.LbEdgeExtension, Awaitable[dep.LbEdgeExtension]], + ]: + raise NotImplementedError() + + @property + def create_lb_edge_extension( + self, + ) -> Callable[ + [dep.CreateLbEdgeExtensionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_lb_edge_extension( + self, + ) -> Callable[ + [dep.UpdateLbEdgeExtensionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_lb_edge_extension( + self, + ) -> Callable[ + [dep.DeleteLbEdgeExtensionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_authz_extensions( self, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc.py index 706b37e3157b..7960a1f97e97 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc.py @@ -620,6 +620,139 @@ def delete_lb_route_extension( ) return self._stubs["delete_lb_route_extension"] + @property + def list_lb_edge_extensions( + self, + ) -> Callable[[dep.ListLbEdgeExtensionsRequest], dep.ListLbEdgeExtensionsResponse]: + r"""Return a callable for the list lb edge extensions method over gRPC. 
+ + Lists ``LbEdgeExtension`` resources in a given project and + location. + + Returns: + Callable[[~.ListLbEdgeExtensionsRequest], + ~.ListLbEdgeExtensionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_lb_edge_extensions" not in self._stubs: + self._stubs["list_lb_edge_extensions"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/ListLbEdgeExtensions", + request_serializer=dep.ListLbEdgeExtensionsRequest.serialize, + response_deserializer=dep.ListLbEdgeExtensionsResponse.deserialize, + ) + return self._stubs["list_lb_edge_extensions"] + + @property + def get_lb_edge_extension( + self, + ) -> Callable[[dep.GetLbEdgeExtensionRequest], dep.LbEdgeExtension]: + r"""Return a callable for the get lb edge extension method over gRPC. + + Gets details of the specified ``LbEdgeExtension`` resource. + + Returns: + Callable[[~.GetLbEdgeExtensionRequest], + ~.LbEdgeExtension]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_lb_edge_extension" not in self._stubs: + self._stubs["get_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/GetLbEdgeExtension", + request_serializer=dep.GetLbEdgeExtensionRequest.serialize, + response_deserializer=dep.LbEdgeExtension.deserialize, + ) + return self._stubs["get_lb_edge_extension"] + + @property + def create_lb_edge_extension( + self, + ) -> Callable[[dep.CreateLbEdgeExtensionRequest], operations_pb2.Operation]: + r"""Return a callable for the create lb edge extension method over gRPC. + + Creates a new ``LbEdgeExtension`` resource in a given project + and location. + + Returns: + Callable[[~.CreateLbEdgeExtensionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_lb_edge_extension" not in self._stubs: + self._stubs["create_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/CreateLbEdgeExtension", + request_serializer=dep.CreateLbEdgeExtensionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_lb_edge_extension"] + + @property + def update_lb_edge_extension( + self, + ) -> Callable[[dep.UpdateLbEdgeExtensionRequest], operations_pb2.Operation]: + r"""Return a callable for the update lb edge extension method over gRPC. + + Updates the parameters of the specified ``LbEdgeExtension`` + resource. + + Returns: + Callable[[~.UpdateLbEdgeExtensionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_lb_edge_extension" not in self._stubs: + self._stubs["update_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/UpdateLbEdgeExtension", + request_serializer=dep.UpdateLbEdgeExtensionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_lb_edge_extension"] + + @property + def delete_lb_edge_extension( + self, + ) -> Callable[[dep.DeleteLbEdgeExtensionRequest], operations_pb2.Operation]: + r"""Return a callable for the delete lb edge extension method over gRPC. + + Deletes the specified ``LbEdgeExtension`` resource. + + Returns: + Callable[[~.DeleteLbEdgeExtensionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_lb_edge_extension" not in self._stubs: + self._stubs["delete_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/DeleteLbEdgeExtension", + request_serializer=dep.DeleteLbEdgeExtensionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_lb_edge_extension"] + @property def list_authz_extensions( self, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc_asyncio.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc_asyncio.py index ba88ce40c8d6..f8c68cb4d0cc 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/grpc_asyncio.py @@ -643,6 +643,147 @@ def delete_lb_route_extension( ) return self._stubs["delete_lb_route_extension"] + @property + def list_lb_edge_extensions( + self, + ) -> Callable[ + [dep.ListLbEdgeExtensionsRequest], Awaitable[dep.ListLbEdgeExtensionsResponse] + ]: + r"""Return a callable for the list lb edge extensions method over gRPC. + + Lists ``LbEdgeExtension`` resources in a given project and + location. + + Returns: + Callable[[~.ListLbEdgeExtensionsRequest], + Awaitable[~.ListLbEdgeExtensionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_lb_edge_extensions" not in self._stubs: + self._stubs["list_lb_edge_extensions"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/ListLbEdgeExtensions", + request_serializer=dep.ListLbEdgeExtensionsRequest.serialize, + response_deserializer=dep.ListLbEdgeExtensionsResponse.deserialize, + ) + return self._stubs["list_lb_edge_extensions"] + + @property + def get_lb_edge_extension( + self, + ) -> Callable[[dep.GetLbEdgeExtensionRequest], Awaitable[dep.LbEdgeExtension]]: + r"""Return a callable for the get lb edge extension method over gRPC. + + Gets details of the specified ``LbEdgeExtension`` resource. 
+ + Returns: + Callable[[~.GetLbEdgeExtensionRequest], + Awaitable[~.LbEdgeExtension]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_lb_edge_extension" not in self._stubs: + self._stubs["get_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/GetLbEdgeExtension", + request_serializer=dep.GetLbEdgeExtensionRequest.serialize, + response_deserializer=dep.LbEdgeExtension.deserialize, + ) + return self._stubs["get_lb_edge_extension"] + + @property + def create_lb_edge_extension( + self, + ) -> Callable[ + [dep.CreateLbEdgeExtensionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create lb edge extension method over gRPC. + + Creates a new ``LbEdgeExtension`` resource in a given project + and location. + + Returns: + Callable[[~.CreateLbEdgeExtensionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_lb_edge_extension" not in self._stubs: + self._stubs["create_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/CreateLbEdgeExtension", + request_serializer=dep.CreateLbEdgeExtensionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_lb_edge_extension"] + + @property + def update_lb_edge_extension( + self, + ) -> Callable[ + [dep.UpdateLbEdgeExtensionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update lb edge extension method over gRPC. + + Updates the parameters of the specified ``LbEdgeExtension`` + resource. + + Returns: + Callable[[~.UpdateLbEdgeExtensionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_lb_edge_extension" not in self._stubs: + self._stubs["update_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/UpdateLbEdgeExtension", + request_serializer=dep.UpdateLbEdgeExtensionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_lb_edge_extension"] + + @property + def delete_lb_edge_extension( + self, + ) -> Callable[ + [dep.DeleteLbEdgeExtensionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete lb edge extension method over gRPC. + + Deletes the specified ``LbEdgeExtension`` resource. + + Returns: + Callable[[~.DeleteLbEdgeExtensionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_lb_edge_extension" not in self._stubs: + self._stubs["delete_lb_edge_extension"] = self._logged_channel.unary_unary( + "/google.cloud.networkservices.v1.DepService/DeleteLbEdgeExtension", + request_serializer=dep.DeleteLbEdgeExtensionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_lb_edge_extension"] + @property def list_authz_extensions( self, @@ -837,6 +978,31 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_lb_edge_extensions: self._wrap_method( + self.list_lb_edge_extensions, + default_timeout=None, + client_info=client_info, + ), + self.get_lb_edge_extension: self._wrap_method( + self.get_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), + self.create_lb_edge_extension: self._wrap_method( + self.create_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), + self.update_lb_edge_extension: self._wrap_method( + self.update_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), + self.delete_lb_edge_extension: self._wrap_method( + self.delete_lb_edge_extension, + default_timeout=None, + client_info=client_info, + ), self.list_authz_extensions: self._wrap_method( self.list_authz_extensions, default_timeout=None, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py index c5f5f09dbe87..19b0d85ed2ad 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest.py @@ -84,6 +84,14 @@ def post_create_authz_extension(self, response): logging.log(f"Received response: {response}") return response + def pre_create_lb_edge_extension(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_lb_edge_extension(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_lb_route_extension(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -108,6 +116,14 @@ def post_delete_authz_extension(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_lb_edge_extension(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_lb_edge_extension(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_lb_route_extension(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -132,6 +148,14 @@ def post_get_authz_extension(self, response): logging.log(f"Received response: {response}") return response + def pre_get_lb_edge_extension(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_lb_edge_extension(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_lb_route_extension(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -156,6 +180,14 @@ def post_list_authz_extensions(self, response): logging.log(f"Received response: {response}") return response + def 
pre_list_lb_edge_extensions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_lb_edge_extensions(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_lb_route_extensions(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -180,6 +212,14 @@ def post_update_authz_extension(self, response): logging.log(f"Received response: {response}") return response + def pre_update_lb_edge_extension(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_lb_edge_extension(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_lb_route_extension(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -250,6 +290,54 @@ def post_create_authz_extension_with_metadata( """ return response, metadata + def pre_create_lb_edge_extension( + self, + request: dep.CreateLbEdgeExtensionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dep.CreateLbEdgeExtensionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_lb_edge_extension + + Override in a subclass to manipulate the request or metadata + before they are sent to the DepService server. + """ + return request, metadata + + def post_create_lb_edge_extension( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_lb_edge_extension + + DEPRECATED. Please use the `post_create_lb_edge_extension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DepService server but before + it is returned to user code. This `post_create_lb_edge_extension` interceptor runs + before the `post_create_lb_edge_extension_with_metadata` interceptor. + """ + return response + + def post_create_lb_edge_extension_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_lb_edge_extension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DepService server but before it is returned to user code. + + We recommend only using this `post_create_lb_edge_extension_with_metadata` + interceptor in new development instead of the `post_create_lb_edge_extension` interceptor. + When both interceptors are used, this `post_create_lb_edge_extension_with_metadata` interceptor runs after the + `post_create_lb_edge_extension` interceptor. The (possibly modified) response returned by + `post_create_lb_edge_extension` will be passed to + `post_create_lb_edge_extension_with_metadata`. + """ + return response, metadata + def pre_create_lb_route_extension( self, request: dep.CreateLbRouteExtensionRequest, @@ -394,6 +482,54 @@ def post_delete_authz_extension_with_metadata( """ return response, metadata + def pre_delete_lb_edge_extension( + self, + request: dep.DeleteLbEdgeExtensionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dep.DeleteLbEdgeExtensionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_lb_edge_extension + + Override in a subclass to manipulate the request or metadata + before they are sent to the DepService server. 
+ """ + return request, metadata + + def post_delete_lb_edge_extension( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_lb_edge_extension + + DEPRECATED. Please use the `post_delete_lb_edge_extension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DepService server but before + it is returned to user code. This `post_delete_lb_edge_extension` interceptor runs + before the `post_delete_lb_edge_extension_with_metadata` interceptor. + """ + return response + + def post_delete_lb_edge_extension_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_lb_edge_extension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DepService server but before it is returned to user code. + + We recommend only using this `post_delete_lb_edge_extension_with_metadata` + interceptor in new development instead of the `post_delete_lb_edge_extension` interceptor. + When both interceptors are used, this `post_delete_lb_edge_extension_with_metadata` interceptor runs after the + `post_delete_lb_edge_extension` interceptor. The (possibly modified) response returned by + `post_delete_lb_edge_extension` will be passed to + `post_delete_lb_edge_extension_with_metadata`. + """ + return response, metadata + def pre_delete_lb_route_extension( self, request: dep.DeleteLbRouteExtensionRequest, @@ -536,6 +672,52 @@ def post_get_authz_extension_with_metadata( """ return response, metadata + def pre_get_lb_edge_extension( + self, + request: dep.GetLbEdgeExtensionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dep.GetLbEdgeExtensionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_lb_edge_extension + + Override in a subclass to manipulate the request or metadata + before they are sent to the DepService server. + """ + return request, metadata + + def post_get_lb_edge_extension( + self, response: dep.LbEdgeExtension + ) -> dep.LbEdgeExtension: + """Post-rpc interceptor for get_lb_edge_extension + + DEPRECATED. Please use the `post_get_lb_edge_extension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DepService server but before + it is returned to user code. This `post_get_lb_edge_extension` interceptor runs + before the `post_get_lb_edge_extension_with_metadata` interceptor. + """ + return response + + def post_get_lb_edge_extension_with_metadata( + self, + response: dep.LbEdgeExtension, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[dep.LbEdgeExtension, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_lb_edge_extension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DepService server but before it is returned to user code. + + We recommend only using this `post_get_lb_edge_extension_with_metadata` + interceptor in new development instead of the `post_get_lb_edge_extension` interceptor. + When both interceptors are used, this `post_get_lb_edge_extension_with_metadata` interceptor runs after the + `post_get_lb_edge_extension` interceptor. 
The (possibly modified) response returned by + `post_get_lb_edge_extension` will be passed to + `post_get_lb_edge_extension_with_metadata`. + """ + return response, metadata + def pre_get_lb_route_extension( self, request: dep.GetLbRouteExtensionRequest, @@ -678,6 +860,56 @@ def post_list_authz_extensions_with_metadata( """ return response, metadata + def pre_list_lb_edge_extensions( + self, + request: dep.ListLbEdgeExtensionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dep.ListLbEdgeExtensionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_lb_edge_extensions + + Override in a subclass to manipulate the request or metadata + before they are sent to the DepService server. + """ + return request, metadata + + def post_list_lb_edge_extensions( + self, response: dep.ListLbEdgeExtensionsResponse + ) -> dep.ListLbEdgeExtensionsResponse: + """Post-rpc interceptor for list_lb_edge_extensions + + DEPRECATED. Please use the `post_list_lb_edge_extensions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DepService server but before + it is returned to user code. This `post_list_lb_edge_extensions` interceptor runs + before the `post_list_lb_edge_extensions_with_metadata` interceptor. + """ + return response + + def post_list_lb_edge_extensions_with_metadata( + self, + response: dep.ListLbEdgeExtensionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dep.ListLbEdgeExtensionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_lb_edge_extensions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DepService server but before it is returned to user code. + + We recommend only using this `post_list_lb_edge_extensions_with_metadata` + interceptor in new development instead of the `post_list_lb_edge_extensions` interceptor. + When both interceptors are used, this `post_list_lb_edge_extensions_with_metadata` interceptor runs after the + `post_list_lb_edge_extensions` interceptor. The (possibly modified) response returned by + `post_list_lb_edge_extensions` will be passed to + `post_list_lb_edge_extensions_with_metadata`. + """ + return response, metadata + def pre_list_lb_route_extensions( self, request: dep.ListLbRouteExtensionsRequest, @@ -826,6 +1058,54 @@ def post_update_authz_extension_with_metadata( """ return response, metadata + def pre_update_lb_edge_extension( + self, + request: dep.UpdateLbEdgeExtensionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + dep.UpdateLbEdgeExtensionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_lb_edge_extension + + Override in a subclass to manipulate the request or metadata + before they are sent to the DepService server. + """ + return request, metadata + + def post_update_lb_edge_extension( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_lb_edge_extension + + DEPRECATED. Please use the `post_update_lb_edge_extension_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DepService server but before + it is returned to user code. This `post_update_lb_edge_extension` interceptor runs + before the `post_update_lb_edge_extension_with_metadata` interceptor. 
+ """ + return response + + def post_update_lb_edge_extension_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_lb_edge_extension + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DepService server but before it is returned to user code. + + We recommend only using this `post_update_lb_edge_extension_with_metadata` + interceptor in new development instead of the `post_update_lb_edge_extension` interceptor. + When both interceptors are used, this `post_update_lb_edge_extension_with_metadata` interceptor runs after the + `post_update_lb_edge_extension` interceptor. The (possibly modified) response returned by + `post_update_lb_edge_extension` will be passed to + `post_update_lb_edge_extension_with_metadata`. + """ + return response, metadata + def pre_update_lb_route_extension( self, request: dep.UpdateLbRouteExtensionRequest, @@ -1435,11 +1715,11 @@ def __call__( ) return resp - class _CreateLbRouteExtension( - _BaseDepServiceRestTransport._BaseCreateLbRouteExtension, DepServiceRestStub + class _CreateLbEdgeExtension( + _BaseDepServiceRestTransport._BaseCreateLbEdgeExtension, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.CreateLbRouteExtension") + return hash("DepServiceRestTransport.CreateLbEdgeExtension") @staticmethod def _get_response( @@ -1466,17 +1746,17 @@ def _get_response( def __call__( self, - request: dep.CreateLbRouteExtensionRequest, + request: dep.CreateLbEdgeExtensionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the create lb route extension method over HTTP. + r"""Call the create lb edge extension method over HTTP. Args: - request (~.dep.CreateLbRouteExtensionRequest): - The request object. Message for creating a ``LbRouteExtension`` resource. + request (~.dep.CreateLbEdgeExtensionRequest): + The request object. Message for creating a ``LbEdgeExtension`` resource. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1494,22 +1774,22 @@ def __call__( """ http_options = ( - _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_http_options() + _BaseDepServiceRestTransport._BaseCreateLbEdgeExtension._get_http_options() ) - request, metadata = self._interceptor.pre_create_lb_route_extension( + request, metadata = self._interceptor.pre_create_lb_edge_extension( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseCreateLbEdgeExtension._get_transcoded_request( http_options, request ) - body = _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_request_body_json( + body = _BaseDepServiceRestTransport._BaseCreateLbEdgeExtension._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseCreateLbEdgeExtension._get_query_params_json( transcoded_request ) @@ -1531,17 +1811,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.CreateLbRouteExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.CreateLbEdgeExtension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "CreateLbRouteExtension", + "rpcName": "CreateLbEdgeExtension", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._CreateLbRouteExtension._get_response( + response = DepServiceRestTransport._CreateLbEdgeExtension._get_response( self._host, metadata, query_params, @@ -1560,9 +1840,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_lb_route_extension(resp) + resp = self._interceptor.post_create_lb_edge_extension(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_lb_route_extension_with_metadata( + resp, _ = self._interceptor.post_create_lb_edge_extension_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -1578,21 +1858,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networkservices_v1.DepServiceClient.create_lb_route_extension", + "Received response for google.cloud.networkservices_v1.DepServiceClient.create_lb_edge_extension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "CreateLbRouteExtension", + "rpcName": "CreateLbEdgeExtension", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _CreateLbTrafficExtension( - _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension, DepServiceRestStub + class _CreateLbRouteExtension( + _BaseDepServiceRestTransport._BaseCreateLbRouteExtension, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.CreateLbTrafficExtension") + return hash("DepServiceRestTransport.CreateLbRouteExtension") @staticmethod def _get_response( @@ -1619,51 +1899,50 @@ def _get_response( def __call__( self, - request: dep.CreateLbTrafficExtensionRequest, + request: dep.CreateLbRouteExtensionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = 
(), ) -> operations_pb2.Operation: - r"""Call the create lb traffic - extension method over HTTP. + r"""Call the create lb route extension method over HTTP. - Args: - request (~.dep.CreateLbTrafficExtensionRequest): - The request object. Message for creating a ``LbTrafficExtension`` resource. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.dep.CreateLbRouteExtensionRequest): + The request object. Message for creating a ``LbRouteExtension`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options = ( - _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_http_options() + _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_http_options() ) - request, metadata = self._interceptor.pre_create_lb_traffic_extension( + request, metadata = self._interceptor.pre_create_lb_route_extension( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_transcoded_request( http_options, request ) - body = _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_request_body_json( + body = _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseCreateLbRouteExtension._get_query_params_json( transcoded_request ) @@ -1685,17 +1964,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.CreateLbTrafficExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.CreateLbRouteExtension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "CreateLbTrafficExtension", + "rpcName": "CreateLbRouteExtension", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._CreateLbTrafficExtension._get_response( + response = DepServiceRestTransport._CreateLbRouteExtension._get_response( self._host, metadata, query_params, @@ -1714,9 +1993,163 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_lb_traffic_extension(resp) + resp = 
self._interceptor.post_create_lb_route_extension(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_lb_traffic_extension_with_metadata( + resp, _ = self._interceptor.post_create_lb_route_extension_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkservices_v1.DepServiceClient.create_lb_route_extension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "CreateLbRouteExtension", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateLbTrafficExtension( + _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension, DepServiceRestStub + ): + def __hash__(self): + return hash("DepServiceRestTransport.CreateLbTrafficExtension") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: dep.CreateLbTrafficExtensionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create lb traffic + extension method over HTTP. + + Args: + request (~.dep.CreateLbTrafficExtensionRequest): + The request object. Message for creating a ``LbTrafficExtension`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_lb_traffic_extension( + request, metadata + ) + transcoded_request = _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_transcoded_request( + http_options, request + ) + + body = _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDepServiceRestTransport._BaseCreateLbTrafficExtension._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.CreateLbTrafficExtension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "CreateLbTrafficExtension", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DepServiceRestTransport._CreateLbTrafficExtension._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_lb_traffic_extension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_lb_traffic_extension_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -1742,11 +2175,452 @@ def __call__( ) return resp - class _DeleteAuthzExtension( - _BaseDepServiceRestTransport._BaseDeleteAuthzExtension, DepServiceRestStub + class _DeleteAuthzExtension( + _BaseDepServiceRestTransport._BaseDeleteAuthzExtension, DepServiceRestStub + ): + def __hash__(self): + return hash("DepServiceRestTransport.DeleteAuthzExtension") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dep.DeleteAuthzExtensionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete authz extension method over HTTP. + + Args: + request (~.dep.DeleteAuthzExtensionRequest): + The request object. Message for deleting a ``AuthzExtension`` resource. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseDepServiceRestTransport._BaseDeleteAuthzExtension._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_authz_extension( + request, metadata + ) + transcoded_request = _BaseDepServiceRestTransport._BaseDeleteAuthzExtension._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDepServiceRestTransport._BaseDeleteAuthzExtension._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.DeleteAuthzExtension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "DeleteAuthzExtension", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DepServiceRestTransport._DeleteAuthzExtension._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_authz_extension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_authz_extension_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkservices_v1.DepServiceClient.delete_authz_extension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "DeleteAuthzExtension", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteLbEdgeExtension( + _BaseDepServiceRestTransport._BaseDeleteLbEdgeExtension, DepServiceRestStub + ): + def __hash__(self): + return hash("DepServiceRestTransport.DeleteLbEdgeExtension") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dep.DeleteLbEdgeExtensionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete lb edge extension method over HTTP. + + Args: + request (~.dep.DeleteLbEdgeExtensionRequest): + The request object. Message for deleting a ``LbEdgeExtension`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseDepServiceRestTransport._BaseDeleteLbEdgeExtension._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_lb_edge_extension( + request, metadata + ) + transcoded_request = _BaseDepServiceRestTransport._BaseDeleteLbEdgeExtension._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDepServiceRestTransport._BaseDeleteLbEdgeExtension._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.DeleteLbEdgeExtension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "DeleteLbEdgeExtension", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DepServiceRestTransport._DeleteLbEdgeExtension._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_lb_edge_extension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_lb_edge_extension_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkservices_v1.DepServiceClient.delete_lb_edge_extension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "DeleteLbEdgeExtension", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteLbRouteExtension( + _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension, DepServiceRestStub + ): + def __hash__(self): + return hash("DepServiceRestTransport.DeleteLbRouteExtension") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: dep.DeleteLbRouteExtensionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete lb route extension method over HTTP. + + Args: + request (~.dep.DeleteLbRouteExtensionRequest): + The request object. Message for deleting a ``LbRouteExtension`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_lb_route_extension( + request, metadata + ) + transcoded_request = _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.DeleteLbRouteExtension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "DeleteLbRouteExtension", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DepServiceRestTransport._DeleteLbRouteExtension._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_lb_route_extension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_lb_route_extension_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkservices_v1.DepServiceClient.delete_lb_route_extension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "DeleteLbRouteExtension", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteLbTrafficExtension( + _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.DeleteAuthzExtension") + return hash("DepServiceRestTransport.DeleteLbTrafficExtension") @staticmethod def _get_response( @@ -1772,46 +2646,47 @@ def _get_response( def __call__( self, - request: dep.DeleteAuthzExtensionRequest, + request: dep.DeleteLbTrafficExtensionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the delete authz extension method over HTTP. + r"""Call the delete lb traffic + extension method over HTTP. - Args: - request (~.dep.DeleteAuthzExtensionRequest): - The request object. Message for deleting a ``AuthzExtension`` resource. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.dep.DeleteLbTrafficExtensionRequest): + The request object. Message for deleting a ``LbTrafficExtension`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseDepServiceRestTransport._BaseDeleteAuthzExtension._get_http_options() + _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension._get_http_options() ) - request, metadata = self._interceptor.pre_delete_authz_extension( + request, metadata = self._interceptor.pre_delete_lb_traffic_extension( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseDeleteAuthzExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseDeleteAuthzExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension._get_query_params_json( transcoded_request ) @@ -1833,17 +2708,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.DeleteAuthzExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.DeleteLbTrafficExtension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "DeleteAuthzExtension", + "rpcName": "DeleteLbTrafficExtension", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._DeleteAuthzExtension._get_response( + response = DepServiceRestTransport._DeleteLbTrafficExtension._get_response( self._host, metadata, query_params, @@ -1861,9 +2736,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_authz_extension(resp) + resp = self._interceptor.post_delete_lb_traffic_extension(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_authz_extension_with_metadata( + resp, _ = self._interceptor.post_delete_lb_traffic_extension_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -1879,21 +2754,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networkservices_v1.DepServiceClient.delete_authz_extension", + "Received response for google.cloud.networkservices_v1.DepServiceClient.delete_lb_traffic_extension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "DeleteAuthzExtension", + "rpcName": "DeleteLbTrafficExtension", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteLbRouteExtension( - _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension, DepServiceRestStub + class _GetAuthzExtension( + _BaseDepServiceRestTransport._BaseGetAuthzExtension, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.DeleteLbRouteExtension") + return hash("DepServiceRestTransport.GetAuthzExtension") @staticmethod def _get_response( @@ -1919,17 +2794,17 @@ def _get_response( def __call__( self, - request: dep.DeleteLbRouteExtensionRequest, + request: dep.GetAuthzExtensionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete lb route extension method over HTTP. + ) -> dep.AuthzExtension: + r"""Call the get authz extension method over HTTP. Args: - request (~.dep.DeleteLbRouteExtensionRequest): - The request object. 
Message for deleting a ``LbRouteExtension`` resource. + request (~.dep.GetAuthzExtensionRequest): + The request object. Message for getting a ``AuthzExtension`` resource. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1939,26 +2814,26 @@ def __call__( be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.dep.AuthzExtension: + ``AuthzExtension`` is a resource that allows traffic + forwarding to a callout backend service to make an + authorization decision. """ http_options = ( - _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension._get_http_options() + _BaseDepServiceRestTransport._BaseGetAuthzExtension._get_http_options() ) - request, metadata = self._interceptor.pre_delete_lb_route_extension( + request, metadata = self._interceptor.pre_get_authz_extension( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseGetAuthzExtension._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseDeleteLbRouteExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseGetAuthzExtension._get_query_params_json( transcoded_request ) @@ -1970,7 +2845,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -1980,17 +2855,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.DeleteLbRouteExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.GetAuthzExtension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "DeleteLbRouteExtension", + "rpcName": "GetAuthzExtension", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._DeleteLbRouteExtension._get_response( + response = DepServiceRestTransport._GetAuthzExtension._get_response( self._host, metadata, query_params, @@ -2005,19 +2880,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = dep.AuthzExtension() + pb_resp = dep.AuthzExtension.pb(resp) - resp = self._interceptor.post_delete_lb_route_extension(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_authz_extension(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_lb_route_extension_with_metadata( + resp, _ = self._interceptor.post_get_authz_extension_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = dep.AuthzExtension.to_json(response) except: response_payload = None http_response = { @@ -2026,21 +2903,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for 
google.cloud.networkservices_v1.DepServiceClient.delete_lb_route_extension", + "Received response for google.cloud.networkservices_v1.DepServiceClient.get_authz_extension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "DeleteLbRouteExtension", + "rpcName": "GetAuthzExtension", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteLbTrafficExtension( - _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension, DepServiceRestStub + class _GetLbEdgeExtension( + _BaseDepServiceRestTransport._BaseGetLbEdgeExtension, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.DeleteLbTrafficExtension") + return hash("DepServiceRestTransport.GetLbEdgeExtension") @staticmethod def _get_response( @@ -2066,47 +2943,47 @@ def _get_response( def __call__( self, - request: dep.DeleteLbTrafficExtensionRequest, + request: dep.GetLbEdgeExtensionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the delete lb traffic - extension method over HTTP. + ) -> dep.LbEdgeExtension: + r"""Call the get lb edge extension method over HTTP. - Args: - request (~.dep.DeleteLbTrafficExtensionRequest): - The request object. Message for deleting a ``LbTrafficExtension`` resource. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.dep.GetLbEdgeExtensionRequest): + The request object. Message for getting a ``LbEdgeExtension`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Returns: + ~.dep.LbEdgeExtension: + ``LbEdgeExtension`` is a resource that lets the + extension service influence the selection of backend + services and Cloud CDN cache keys by modifying request + headers. 
""" http_options = ( - _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension._get_http_options() + _BaseDepServiceRestTransport._BaseGetLbEdgeExtension._get_http_options() ) - request, metadata = self._interceptor.pre_delete_lb_traffic_extension( + request, metadata = self._interceptor.pre_get_lb_edge_extension( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseGetLbEdgeExtension._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseDeleteLbTrafficExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseGetLbEdgeExtension._get_query_params_json( transcoded_request ) @@ -2118,7 +2995,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -2128,17 +3005,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.DeleteLbTrafficExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.GetLbEdgeExtension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "DeleteLbTrafficExtension", + "rpcName": "GetLbEdgeExtension", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._DeleteLbTrafficExtension._get_response( + response = DepServiceRestTransport._GetLbEdgeExtension._get_response( self._host, metadata, query_params, @@ -2153,19 +3030,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = dep.LbEdgeExtension() + pb_resp = dep.LbEdgeExtension.pb(resp) - resp = self._interceptor.post_delete_lb_traffic_extension(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_lb_edge_extension(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_lb_traffic_extension_with_metadata( + resp, _ = self._interceptor.post_get_lb_edge_extension_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = dep.LbEdgeExtension.to_json(response) except: response_payload = None http_response = { @@ -2174,21 +3053,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networkservices_v1.DepServiceClient.delete_lb_traffic_extension", + "Received response for google.cloud.networkservices_v1.DepServiceClient.get_lb_edge_extension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "DeleteLbTrafficExtension", + "rpcName": "GetLbEdgeExtension", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetAuthzExtension( - _BaseDepServiceRestTransport._BaseGetAuthzExtension, DepServiceRestStub + class _GetLbRouteExtension( + _BaseDepServiceRestTransport._BaseGetLbRouteExtension, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.GetAuthzExtension") + 
return hash("DepServiceRestTransport.GetLbRouteExtension") @staticmethod def _get_response( @@ -2214,17 +3093,17 @@ def _get_response( def __call__( self, - request: dep.GetAuthzExtensionRequest, + request: dep.GetLbRouteExtensionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dep.AuthzExtension: - r"""Call the get authz extension method over HTTP. + ) -> dep.LbRouteExtension: + r"""Call the get lb route extension method over HTTP. Args: - request (~.dep.GetAuthzExtensionRequest): - The request object. Message for getting a ``AuthzExtension`` resource. + request (~.dep.GetLbRouteExtensionRequest): + The request object. Message for getting a ``LbRouteExtension`` resource. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2234,26 +3113,25 @@ def __call__( be of type `bytes`. Returns: - ~.dep.AuthzExtension: - ``AuthzExtension`` is a resource that allows traffic - forwarding to a callout backend service to make an - authorization decision. + ~.dep.LbRouteExtension: + ``LbRouteExtension`` is a resource that lets you control + where traffic is routed to for a given request. """ http_options = ( - _BaseDepServiceRestTransport._BaseGetAuthzExtension._get_http_options() + _BaseDepServiceRestTransport._BaseGetLbRouteExtension._get_http_options() ) - request, metadata = self._interceptor.pre_get_authz_extension( + request, metadata = self._interceptor.pre_get_lb_route_extension( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseGetAuthzExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseGetLbRouteExtension._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseGetAuthzExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseGetLbRouteExtension._get_query_params_json( transcoded_request ) @@ -2275,17 +3153,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.GetAuthzExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.GetLbRouteExtension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "GetAuthzExtension", + "rpcName": "GetLbRouteExtension", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._GetAuthzExtension._get_response( + response = DepServiceRestTransport._GetLbRouteExtension._get_response( self._host, metadata, query_params, @@ -2300,21 +3178,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = dep.AuthzExtension() - pb_resp = dep.AuthzExtension.pb(resp) + resp = dep.LbRouteExtension() + pb_resp = dep.LbRouteExtension.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_authz_extension(resp) + resp = self._interceptor.post_get_lb_route_extension(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_authz_extension_with_metadata( + resp, _ = self._interceptor.post_get_lb_route_extension_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload 
= dep.AuthzExtension.to_json(response) + response_payload = dep.LbRouteExtension.to_json(response) except: response_payload = None http_response = { @@ -2323,21 +3201,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networkservices_v1.DepServiceClient.get_authz_extension", + "Received response for google.cloud.networkservices_v1.DepServiceClient.get_lb_route_extension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "GetAuthzExtension", + "rpcName": "GetLbRouteExtension", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetLbRouteExtension( - _BaseDepServiceRestTransport._BaseGetLbRouteExtension, DepServiceRestStub + class _GetLbTrafficExtension( + _BaseDepServiceRestTransport._BaseGetLbTrafficExtension, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.GetLbRouteExtension") + return hash("DepServiceRestTransport.GetLbTrafficExtension") @staticmethod def _get_response( @@ -2363,17 +3241,17 @@ def _get_response( def __call__( self, - request: dep.GetLbRouteExtensionRequest, + request: dep.GetLbTrafficExtensionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dep.LbRouteExtension: - r"""Call the get lb route extension method over HTTP. + ) -> dep.LbTrafficExtension: + r"""Call the get lb traffic extension method over HTTP. Args: - request (~.dep.GetLbRouteExtensionRequest): - The request object. Message for getting a ``LbRouteExtension`` resource. + request (~.dep.GetLbTrafficExtensionRequest): + The request object. Message for getting a ``LbTrafficExtension`` resource. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2383,25 +3261,28 @@ def __call__( be of type `bytes`. Returns: - ~.dep.LbRouteExtension: - ``LbRouteExtension`` is a resource that lets you control - where traffic is routed to for a given request. + ~.dep.LbTrafficExtension: + ``LbTrafficExtension`` is a resource that lets the + extension service modify the headers and payloads of + both requests and responses without impacting the choice + of backend services or any other security policies + associated with the backend service. 
""" http_options = ( - _BaseDepServiceRestTransport._BaseGetLbRouteExtension._get_http_options() + _BaseDepServiceRestTransport._BaseGetLbTrafficExtension._get_http_options() ) - request, metadata = self._interceptor.pre_get_lb_route_extension( + request, metadata = self._interceptor.pre_get_lb_traffic_extension( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseGetLbRouteExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseGetLbTrafficExtension._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseGetLbRouteExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseGetLbTrafficExtension._get_query_params_json( transcoded_request ) @@ -2423,17 +3304,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.GetLbRouteExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.GetLbTrafficExtension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "GetLbRouteExtension", + "rpcName": "GetLbTrafficExtension", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._GetLbRouteExtension._get_response( + response = DepServiceRestTransport._GetLbTrafficExtension._get_response( self._host, metadata, query_params, @@ -2448,21 +3329,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = dep.LbRouteExtension() - pb_resp = dep.LbRouteExtension.pb(resp) + resp = dep.LbTrafficExtension() + pb_resp = dep.LbTrafficExtension.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_lb_route_extension(resp) + resp = self._interceptor.post_get_lb_traffic_extension(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_lb_route_extension_with_metadata( + resp, _ = self._interceptor.post_get_lb_traffic_extension_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = dep.LbRouteExtension.to_json(response) + response_payload = dep.LbTrafficExtension.to_json(response) except: response_payload = None http_response = { @@ -2471,21 +3352,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networkservices_v1.DepServiceClient.get_lb_route_extension", + "Received response for google.cloud.networkservices_v1.DepServiceClient.get_lb_traffic_extension", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "GetLbRouteExtension", + "rpcName": "GetLbTrafficExtension", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetLbTrafficExtension( - _BaseDepServiceRestTransport._BaseGetLbTrafficExtension, DepServiceRestStub + class _ListAuthzExtensions( + _BaseDepServiceRestTransport._BaseListAuthzExtensions, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.GetLbTrafficExtension") + return hash("DepServiceRestTransport.ListAuthzExtensions") @staticmethod def _get_response( @@ -2511,17 +3392,18 @@ def _get_response( def __call__( self, - request: dep.GetLbTrafficExtensionRequest, + request: dep.ListAuthzExtensionsRequest, *, retry: OptionalRetry 
= gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dep.LbTrafficExtension: - r"""Call the get lb traffic extension method over HTTP. + ) -> dep.ListAuthzExtensionsResponse: + r"""Call the list authz extensions method over HTTP. Args: - request (~.dep.GetLbTrafficExtensionRequest): - The request object. Message for getting a ``LbTrafficExtension`` resource. + request (~.dep.ListAuthzExtensionsRequest): + The request object. Message for requesting list of ``AuthzExtension`` + resources. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2531,28 +3413,25 @@ def __call__( be of type `bytes`. Returns: - ~.dep.LbTrafficExtension: - ``LbTrafficExtension`` is a resource that lets the - extension service modify the headers and payloads of - both requests and responses without impacting the choice - of backend services or any other security policies - associated with the backend service. + ~.dep.ListAuthzExtensionsResponse: + Message for response to listing ``AuthzExtension`` + resources. """ http_options = ( - _BaseDepServiceRestTransport._BaseGetLbTrafficExtension._get_http_options() + _BaseDepServiceRestTransport._BaseListAuthzExtensions._get_http_options() ) - request, metadata = self._interceptor.pre_get_lb_traffic_extension( + request, metadata = self._interceptor.pre_list_authz_extensions( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseGetLbTrafficExtension._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseListAuthzExtensions._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseGetLbTrafficExtension._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseListAuthzExtensions._get_query_params_json( transcoded_request ) @@ -2574,17 +3453,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.GetLbTrafficExtension", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.ListAuthzExtensions", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "GetLbTrafficExtension", + "rpcName": "ListAuthzExtensions", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._GetLbTrafficExtension._get_response( + response = DepServiceRestTransport._ListAuthzExtensions._get_response( self._host, metadata, query_params, @@ -2599,21 +3478,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = dep.LbTrafficExtension() - pb_resp = dep.LbTrafficExtension.pb(resp) + resp = dep.ListAuthzExtensionsResponse() + pb_resp = dep.ListAuthzExtensionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_lb_traffic_extension(resp) + resp = self._interceptor.post_list_authz_extensions(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_lb_traffic_extension_with_metadata( + resp, _ = self._interceptor.post_list_authz_extensions_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = dep.LbTrafficExtension.to_json(response) + response_payload 
= dep.ListAuthzExtensionsResponse.to_json(response) except: response_payload = None http_response = { @@ -2622,21 +3501,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networkservices_v1.DepServiceClient.get_lb_traffic_extension", + "Received response for google.cloud.networkservices_v1.DepServiceClient.list_authz_extensions", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "GetLbTrafficExtension", + "rpcName": "ListAuthzExtensions", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListAuthzExtensions( - _BaseDepServiceRestTransport._BaseListAuthzExtensions, DepServiceRestStub + class _ListLbEdgeExtensions( + _BaseDepServiceRestTransport._BaseListLbEdgeExtensions, DepServiceRestStub ): def __hash__(self): - return hash("DepServiceRestTransport.ListAuthzExtensions") + return hash("DepServiceRestTransport.ListLbEdgeExtensions") @staticmethod def _get_response( @@ -2662,17 +3541,17 @@ def _get_response( def __call__( self, - request: dep.ListAuthzExtensionsRequest, + request: dep.ListLbEdgeExtensionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> dep.ListAuthzExtensionsResponse: - r"""Call the list authz extensions method over HTTP. + ) -> dep.ListLbEdgeExtensionsResponse: + r"""Call the list lb edge extensions method over HTTP. Args: - request (~.dep.ListAuthzExtensionsRequest): - The request object. Message for requesting list of ``AuthzExtension`` + request (~.dep.ListLbEdgeExtensionsRequest): + The request object. Message for requesting list of ``LbEdgeExtension`` resources. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -2683,25 +3562,25 @@ def __call__( be of type `bytes`. Returns: - ~.dep.ListAuthzExtensionsResponse: - Message for response to listing ``AuthzExtension`` + ~.dep.ListLbEdgeExtensionsResponse: + Message for response to listing ``LbEdgeExtension`` resources. 
""" http_options = ( - _BaseDepServiceRestTransport._BaseListAuthzExtensions._get_http_options() + _BaseDepServiceRestTransport._BaseListLbEdgeExtensions._get_http_options() ) - request, metadata = self._interceptor.pre_list_authz_extensions( + request, metadata = self._interceptor.pre_list_lb_edge_extensions( request, metadata ) - transcoded_request = _BaseDepServiceRestTransport._BaseListAuthzExtensions._get_transcoded_request( + transcoded_request = _BaseDepServiceRestTransport._BaseListLbEdgeExtensions._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseDepServiceRestTransport._BaseListAuthzExtensions._get_query_params_json( + query_params = _BaseDepServiceRestTransport._BaseListLbEdgeExtensions._get_query_params_json( transcoded_request ) @@ -2723,17 +3602,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.networkservices_v1.DepServiceClient.ListAuthzExtensions", + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.ListLbEdgeExtensions", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "ListAuthzExtensions", + "rpcName": "ListLbEdgeExtensions", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = DepServiceRestTransport._ListAuthzExtensions._get_response( + response = DepServiceRestTransport._ListLbEdgeExtensions._get_response( self._host, metadata, query_params, @@ -2748,21 +3627,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = dep.ListAuthzExtensionsResponse() - pb_resp = dep.ListAuthzExtensionsResponse.pb(resp) + resp = dep.ListLbEdgeExtensionsResponse() + pb_resp = dep.ListLbEdgeExtensionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_authz_extensions(resp) + resp = self._interceptor.post_list_lb_edge_extensions(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_authz_extensions_with_metadata( + resp, _ = self._interceptor.post_list_lb_edge_extensions_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = dep.ListAuthzExtensionsResponse.to_json(response) + response_payload = dep.ListLbEdgeExtensionsResponse.to_json( + response + ) except: response_payload = None http_response = { @@ -2771,10 +3652,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.networkservices_v1.DepServiceClient.list_authz_extensions", + "Received response for google.cloud.networkservices_v1.DepServiceClient.list_lb_edge_extensions", extra={ "serviceName": "google.cloud.networkservices.v1.DepService", - "rpcName": "ListAuthzExtensions", + "rpcName": "ListLbEdgeExtensions", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -3237,6 +4118,159 @@ def __call__( ) return resp + class _UpdateLbEdgeExtension( + _BaseDepServiceRestTransport._BaseUpdateLbEdgeExtension, DepServiceRestStub + ): + def __hash__(self): + return hash("DepServiceRestTransport.UpdateLbEdgeExtension") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: dep.UpdateLbEdgeExtensionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update lb edge extension method over HTTP. + + Args: + request (~.dep.UpdateLbEdgeExtensionRequest): + The request object. Message for updating a ``LbEdgeExtension`` resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseDepServiceRestTransport._BaseUpdateLbEdgeExtension._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_lb_edge_extension( + request, metadata + ) + transcoded_request = _BaseDepServiceRestTransport._BaseUpdateLbEdgeExtension._get_transcoded_request( + http_options, request + ) + + body = _BaseDepServiceRestTransport._BaseUpdateLbEdgeExtension._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDepServiceRestTransport._BaseUpdateLbEdgeExtension._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.networkservices_v1.DepServiceClient.UpdateLbEdgeExtension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "UpdateLbEdgeExtension", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DepServiceRestTransport._UpdateLbEdgeExtension._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_lb_edge_extension(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_lb_edge_extension_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.networkservices_v1.DepServiceClient.update_lb_edge_extension", + extra={ + "serviceName": "google.cloud.networkservices.v1.DepService", + "rpcName": "UpdateLbEdgeExtension", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _UpdateLbRouteExtension( _BaseDepServiceRestTransport._BaseUpdateLbRouteExtension, DepServiceRestStub ): @@ -3552,6 +4586,14 @@ def create_authz_extension( # In C++ this would require a dynamic_cast return self._CreateAuthzExtension(self._session, self._host, self._interceptor) # type: ignore + @property + def create_lb_edge_extension( + self, + ) -> Callable[[dep.CreateLbEdgeExtensionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateLbEdgeExtension(self._session, self._host, self._interceptor) # type: ignore + @property def create_lb_route_extension( self, @@ -3576,6 +4618,14 @@ def delete_authz_extension( # In C++ this would require a dynamic_cast return self._DeleteAuthzExtension(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_lb_edge_extension( + self, + ) -> Callable[[dep.DeleteLbEdgeExtensionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteLbEdgeExtension(self._session, self._host, self._interceptor) # type: ignore + @property def delete_lb_route_extension( self, @@ -3600,6 +4650,14 @@ def get_authz_extension( # In C++ this would require a dynamic_cast return self._GetAuthzExtension(self._session, self._host, self._interceptor) # type: ignore + @property + def get_lb_edge_extension( + self, + ) -> Callable[[dep.GetLbEdgeExtensionRequest], dep.LbEdgeExtension]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetLbEdgeExtension(self._session, self._host, self._interceptor) # type: ignore + @property def get_lb_route_extension( self, @@ -3624,6 +4682,14 @@ def list_authz_extensions( # In C++ this would require a dynamic_cast return self._ListAuthzExtensions(self._session, self._host, self._interceptor) # type: ignore + @property + def list_lb_edge_extensions( + self, + ) -> Callable[[dep.ListLbEdgeExtensionsRequest], dep.ListLbEdgeExtensionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListLbEdgeExtensions(self._session, self._host, self._interceptor) # type: ignore + @property def list_lb_route_extensions( self, @@ -3652,6 +4718,14 @@ def update_authz_extension( # In C++ this would require a dynamic_cast return self._UpdateAuthzExtension(self._session, self._host, self._interceptor) # type: ignore + @property + def update_lb_edge_extension( + self, + ) -> Callable[[dep.UpdateLbEdgeExtensionRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateLbEdgeExtension(self._session, self._host, self._interceptor) # type: ignore + @property def update_lb_route_extension( self, diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest_base.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest_base.py index 0ed4a2cb444a..fb15b009e58a 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest_base.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/services/dep_service/transports/rest_base.py @@ -150,6 +150,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateLbEdgeExtension: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "lbEdgeExtensionId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/lbEdgeExtensions", + "body": "lb_edge_extension", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dep.CreateLbEdgeExtensionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDepServiceRestTransport._BaseCreateLbEdgeExtension._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateLbRouteExtension: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -315,6 +374,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteLbEdgeExtension: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/lbEdgeExtensions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dep.DeleteLbEdgeExtensionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDepServiceRestTransport._BaseDeleteLbEdgeExtension._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteLbRouteExtension: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -456,6 +562,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetLbEdgeExtension: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/lbEdgeExtensions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dep.GetLbEdgeExtensionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDepServiceRestTransport._BaseGetLbEdgeExtension._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetLbRouteExtension: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -597,6 +750,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListLbEdgeExtensions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/lbEdgeExtensions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dep.ListLbEdgeExtensionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDepServiceRestTransport._BaseListLbEdgeExtensions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListLbRouteExtensions: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -750,6 +950,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateLbEdgeExtension: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{lb_edge_extension.name=projects/*/locations/*/lbEdgeExtensions/*}", + "body": "lb_edge_extension", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = dep.UpdateLbEdgeExtensionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDepServiceRestTransport._BaseUpdateLbEdgeExtension._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateLbRouteExtension: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/types/__init__.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/types/__init__.py index 20b8163b0aae..d86d513073b2 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/types/__init__.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/types/__init__.py @@ -22,26 +22,33 @@ from .dep import ( AuthzExtension, CreateAuthzExtensionRequest, + CreateLbEdgeExtensionRequest, CreateLbRouteExtensionRequest, CreateLbTrafficExtensionRequest, DeleteAuthzExtensionRequest, + DeleteLbEdgeExtensionRequest, DeleteLbRouteExtensionRequest, DeleteLbTrafficExtensionRequest, EventType, ExtensionChain, GetAuthzExtensionRequest, + GetLbEdgeExtensionRequest, GetLbRouteExtensionRequest, GetLbTrafficExtensionRequest, + LbEdgeExtension, LbRouteExtension, LbTrafficExtension, ListAuthzExtensionsRequest, ListAuthzExtensionsResponse, + ListLbEdgeExtensionsRequest, + ListLbEdgeExtensionsResponse, ListLbRouteExtensionsRequest, ListLbRouteExtensionsResponse, ListLbTrafficExtensionsRequest, ListLbTrafficExtensionsResponse, LoadBalancingScheme, UpdateAuthzExtensionRequest, + UpdateLbEdgeExtensionRequest, 
UpdateLbRouteExtensionRequest, UpdateLbTrafficExtensionRequest, WireFormat, @@ -161,24 +168,31 @@ "EnvoyHeaders", "AuthzExtension", "CreateAuthzExtensionRequest", + "CreateLbEdgeExtensionRequest", "CreateLbRouteExtensionRequest", "CreateLbTrafficExtensionRequest", "DeleteAuthzExtensionRequest", + "DeleteLbEdgeExtensionRequest", "DeleteLbRouteExtensionRequest", "DeleteLbTrafficExtensionRequest", "ExtensionChain", "GetAuthzExtensionRequest", + "GetLbEdgeExtensionRequest", "GetLbRouteExtensionRequest", "GetLbTrafficExtensionRequest", + "LbEdgeExtension", "LbRouteExtension", "LbTrafficExtension", "ListAuthzExtensionsRequest", "ListAuthzExtensionsResponse", + "ListLbEdgeExtensionsRequest", + "ListLbEdgeExtensionsResponse", "ListLbRouteExtensionsRequest", "ListLbRouteExtensionsResponse", "ListLbTrafficExtensionsRequest", "ListLbTrafficExtensionsResponse", "UpdateAuthzExtensionRequest", + "UpdateLbEdgeExtensionRequest", "UpdateLbRouteExtensionRequest", "UpdateLbTrafficExtensionRequest", "EventType", diff --git a/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py b/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py index c18a73eef36e..35e96df7fcdd 100644 --- a/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py +++ b/packages/google-cloud-network-services/google/cloud/network_services_v1/types/dep.py @@ -44,6 +44,13 @@ "CreateLbRouteExtensionRequest", "UpdateLbRouteExtensionRequest", "DeleteLbRouteExtensionRequest", + "LbEdgeExtension", + "ListLbEdgeExtensionsRequest", + "ListLbEdgeExtensionsResponse", + "GetLbEdgeExtensionRequest", + "CreateLbEdgeExtensionRequest", + "UpdateLbEdgeExtensionRequest", + "DeleteLbEdgeExtensionRequest", "AuthzExtension", "ListAuthzExtensionsRequest", "ListAuthzExtensionsResponse", @@ -1047,6 +1054,324 @@ class DeleteLbRouteExtensionRequest(proto.Message): ) +class LbEdgeExtension(proto.Message): + r"""``LbEdgeExtension`` is a resource that lets the extension service + influence the selection of backend services and Cloud CDN cache keys + by modifying request headers. + + Attributes: + name (str): + Required. Identifier. Name of the ``LbEdgeExtension`` + resource in the following format: + ``projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the resource + was updated. + description (str): + Optional. A human-readable description of the + resource. + labels (MutableMapping[str, str]): + Optional. Set of labels associated with the + ``LbEdgeExtension`` resource. + + The format must comply with `the requirements for + labels `__ + for Google Cloud resources. + forwarding_rules (MutableSequence[str]): + Required. A list of references to the forwarding rules to + which this service extension is attached. At least one + forwarding rule is required. Only one ``LbEdgeExtension`` + resource can be associated with a forwarding rule. + extension_chains (MutableSequence[google.cloud.network_services_v1.types.ExtensionChain]): + Required. A set of ordered extension chains + that contain the match conditions and extensions + to execute. Match conditions for each extension + chain are evaluated in sequence for a given + request. The first extension chain that has a + condition that matches the request is executed. 
+ Any subsequent extension chains do not execute. + Limited to 5 extension chains per resource. + load_balancing_scheme (google.cloud.network_services_v1.types.LoadBalancingScheme): + Required. All forwarding rules referenced by this extension + must share the same load balancing scheme. Supported values: + ``EXTERNAL_MANAGED``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=9, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + forwarding_rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + extension_chains: MutableSequence["ExtensionChain"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="ExtensionChain", + ) + load_balancing_scheme: "LoadBalancingScheme" = proto.Field( + proto.ENUM, + number=7, + enum="LoadBalancingScheme", + ) + + +class ListLbEdgeExtensionsRequest(proto.Message): + r"""Message for requesting list of ``LbEdgeExtension`` resources. + + Attributes: + parent (str): + Required. The project and location from which the + ``LbEdgeExtension`` resources are listed. These values are + specified in the following format: + ``projects/{project}/locations/{location}``. + page_size (int): + Optional. Requested page size. The server + might return fewer items than requested. If + unspecified, the server picks an appropriate + default. + page_token (str): + Optional. A token identifying a page of + results that the server returns. + filter (str): + Optional. Filtering results. + order_by (str): + Optional. Hint about how to order the + results. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLbEdgeExtensionsResponse(proto.Message): + r"""Message for response to listing ``LbEdgeExtension`` resources. + + Attributes: + lb_edge_extensions (MutableSequence[google.cloud.network_services_v1.types.LbEdgeExtension]): + The list of ``LbEdgeExtension`` resources. + next_page_token (str): + A token identifying a page of results that + the server returns. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + lb_edge_extensions: MutableSequence["LbEdgeExtension"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LbEdgeExtension", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetLbEdgeExtensionRequest(proto.Message): + r"""Message for getting a ``LbEdgeExtension`` resource. + + Attributes: + name (str): + Required. A name of the ``LbEdgeExtension`` resource to get. + Must be in the format + ``projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateLbEdgeExtensionRequest(proto.Message): + r"""Message for creating a ``LbEdgeExtension`` resource. 
+ + Attributes: + parent (str): + Required. The parent resource of the ``LbEdgeExtension`` + resource. Must be in the format + ``projects/{project}/locations/{location}``. + lb_edge_extension_id (str): + Required. User-provided ID of the ``LbEdgeExtension`` + resource to be created. + lb_edge_extension (google.cloud.network_services_v1.types.LbEdgeExtension): + Required. ``LbEdgeExtension`` resource to be created. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server can + ignore the request if it has already been + completed. The server guarantees that for 60 + minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server ignores the second request This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + lb_edge_extension_id: str = proto.Field( + proto.STRING, + number=2, + ) + lb_edge_extension: "LbEdgeExtension" = proto.Field( + proto.MESSAGE, + number=3, + message="LbEdgeExtension", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateLbEdgeExtensionRequest(proto.Message): + r"""Message for updating a ``LbEdgeExtension`` resource. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Used to specify the fields to be overwritten in + the ``LbEdgeExtension`` resource by the update. The fields + specified in the ``update_mask`` are relative to the + resource, not the full request. A field is overwritten if it + is in the mask. If the user does not specify a mask, then + all fields are overwritten. + lb_edge_extension (google.cloud.network_services_v1.types.LbEdgeExtension): + Required. ``LbEdgeExtension`` resource being updated. + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server can + ignore the request if it has already been + completed. The server guarantees that for 60 + minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server ignores the second request This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + lb_edge_extension: "LbEdgeExtension" = proto.Field( + proto.MESSAGE, + number=2, + message="LbEdgeExtension", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteLbEdgeExtensionRequest(proto.Message): + r"""Message for deleting a ``LbEdgeExtension`` resource. + + Attributes: + name (str): + Required. The name of the ``LbEdgeExtension`` resource to + delete. Must be in the format + ``projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}``. + request_id (str): + Optional. An optional request ID to identify + requests. 
Specify a unique request ID so that if + you must retry your request, the server can + ignore the request if it has already been + completed. The server guarantees that for 60 + minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server ignores the second request This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + + class AuthzExtension(proto.Message): r"""``AuthzExtension`` is a resource that allows traffic forwarding to a callout backend service to make an authorization decision. diff --git a/packages/google-cloud-network-services/noxfile.py b/packages/google-cloud-network-services/noxfile.py index d870c08c5203..15f4ec70c077 100644 --- a/packages/google-cloud-network-services/noxfile.py +++ b/packages/google-cloud-network-services/noxfile.py @@ -27,6 +27,10 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +# Add samples to the list of directories to format if the directory exists. +if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_async.py index 5b3af4ed27cb..cb72c4c6714b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_async.py @@ -61,4 +61,5 @@ async def sample_create_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_CreateAuthzExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_sync.py index 4fa197c58e7c..7a6e66e55449 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_authz_extension_sync.py @@ -61,4 +61,5 @@ def sample_create_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_CreateAuthzExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_edge_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_edge_extension_async.py new file mode 100644 index 000000000000..054ec17c888e --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_edge_extension_async.py @@ -0,0 +1,73 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_CreateLbEdgeExtension_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_create_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.CreateLbEdgeExtensionRequest( + parent="parent_value", + lb_edge_extension_id="lb_edge_extension_id_value", + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.create_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_CreateLbEdgeExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_edge_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_edge_extension_sync.py new file mode 100644 index 000000000000..56721671a022 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_edge_extension_sync.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_CreateLbEdgeExtension_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_create_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.CreateLbEdgeExtensionRequest( + parent="parent_value", + lb_edge_extension_id="lb_edge_extension_id_value", + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.create_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_CreateLbEdgeExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_async.py index 0dc46982fa9f..8afb9f810d9f 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_async.py @@ -41,9 +41,14 @@ async def sample_create_lb_route_extension(): # Initialize request argument(s) lb_route_extension = network_services_v1.LbRouteExtension() lb_route_extension.name = "name_value" - lb_route_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_route_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] 
lb_route_extension.extension_chains.name = "name_value" - lb_route_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_route_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_route_extension.extension_chains.extensions.name = "name_value" lb_route_extension.extension_chains.extensions.service = "service_value" lb_route_extension.load_balancing_scheme = "EXTERNAL_MANAGED" @@ -64,4 +69,5 @@ async def sample_create_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_CreateLbRouteExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_sync.py index 07f8eb3b5865..ac71a2769814 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_route_extension_sync.py @@ -41,9 +41,14 @@ def sample_create_lb_route_extension(): # Initialize request argument(s) lb_route_extension = network_services_v1.LbRouteExtension() lb_route_extension.name = "name_value" - lb_route_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_route_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] lb_route_extension.extension_chains.name = "name_value" - lb_route_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_route_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_route_extension.extension_chains.extensions.name = "name_value" lb_route_extension.extension_chains.extensions.service = "service_value" lb_route_extension.load_balancing_scheme = "EXTERNAL_MANAGED" @@ -64,4 +69,5 @@ def sample_create_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_CreateLbRouteExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_async.py index 78deb1e75d32..be70dfdf4d4e 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_async.py @@ -42,7 +42,9 @@ async def sample_create_lb_traffic_extension(): lb_traffic_extension = network_services_v1.LbTrafficExtension() lb_traffic_extension.name = "name_value" lb_traffic_extension.extension_chains.name = "name_value" - lb_traffic_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_traffic_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_traffic_extension.extension_chains.extensions.name = "name_value" lb_traffic_extension.extension_chains.extensions.service = "service_value" lb_traffic_extension.load_balancing_scheme = 
"EXTERNAL_MANAGED" @@ -63,4 +65,5 @@ async def sample_create_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_CreateLbTrafficExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_sync.py index 94809575d585..3da96682c2f8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_create_lb_traffic_extension_sync.py @@ -42,7 +42,9 @@ def sample_create_lb_traffic_extension(): lb_traffic_extension = network_services_v1.LbTrafficExtension() lb_traffic_extension.name = "name_value" lb_traffic_extension.extension_chains.name = "name_value" - lb_traffic_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_traffic_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_traffic_extension.extension_chains.extensions.name = "name_value" lb_traffic_extension.extension_chains.extensions.service = "service_value" lb_traffic_extension.load_balancing_scheme = "EXTERNAL_MANAGED" @@ -63,4 +65,5 @@ def sample_create_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_CreateLbTrafficExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_async.py index 3112edaba8ec..081f3ce6fb98 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_async.py @@ -53,4 +53,5 @@ async def sample_delete_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_DeleteAuthzExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_sync.py index 75e42ccb29a3..ca28fc42e332 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_authz_extension_sync.py @@ -53,4 +53,5 @@ def sample_delete_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_DeleteAuthzExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_edge_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_edge_extension_async.py new file mode 100644 index 
000000000000..f497f12d4682 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_edge_extension_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_DeleteLbEdgeExtension_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_delete_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_DeleteLbEdgeExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_edge_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_edge_extension_sync.py new file mode 100644 index 000000000000..c777d69c065a --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_edge_extension_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_DeleteLbEdgeExtension_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_delete_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + request = network_services_v1.DeleteLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_DeleteLbEdgeExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py index d8a667a96c53..032d0481cb0b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py @@ -53,4 +53,5 @@ async def sample_delete_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_DeleteLbRouteExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py index e15091ad2bbd..8e2068a77197 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py @@ -53,4 +53,5 @@ def sample_delete_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_DeleteLbRouteExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py index 14e99dc056bf..92b53dc01e7b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py @@ -53,4 +53,5 @@ async def 
sample_delete_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_DeleteLbTrafficExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py index 725af04de6ab..1584467477cd 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py @@ -53,4 +53,5 @@ def sample_delete_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_DeleteLbTrafficExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_async.py index 7fe89a2e0504..845ad80d6084 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_async.py @@ -49,4 +49,5 @@ async def sample_get_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_GetAuthzExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_sync.py index 747650f391de..2ef96d3ed991 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_authz_extension_sync.py @@ -49,4 +49,5 @@ def sample_get_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_GetAuthzExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_edge_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_edge_extension_async.py new file mode 100644 index 000000000000..888acf6129c8 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_edge_extension_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_GetLbEdgeExtension_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_get_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.GetLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_lb_edge_extension(request=request) + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_GetLbEdgeExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_edge_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_edge_extension_sync.py new file mode 100644 index 000000000000..bfe7933eff92 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_edge_extension_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_GetLbEdgeExtension_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_get_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + request = network_services_v1.GetLbEdgeExtensionRequest( + name="name_value", + ) + + # Make the request + response = client.get_lb_edge_extension(request=request) + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_GetLbEdgeExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_async.py index 8f7b6bddae5c..9ad6ad146b6c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_async.py @@ -49,4 +49,5 @@ async def sample_get_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_GetLbRouteExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py index 373fc940a2e8..ec139a203d8c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py @@ -49,4 +49,5 @@ def sample_get_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_GetLbRouteExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py index 2b9675d6fc4d..aba40bf71714 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py @@ -49,4 +49,5 @@ async def sample_get_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_GetLbTrafficExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py index cc6e6ccccc6b..5806379de175 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py +++ 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py @@ -49,4 +49,5 @@ def sample_get_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_GetLbTrafficExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_async.py index 9a393bb6b717..1779f76b775e 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_async.py @@ -50,4 +50,5 @@ async def sample_list_authz_extensions(): async for response in page_result: print(response) + # [END networkservices_v1_generated_DepService_ListAuthzExtensions_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_sync.py index ca53f42ce535..ae3225e0c86c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_authz_extensions_sync.py @@ -50,4 +50,5 @@ def sample_list_authz_extensions(): for response in page_result: print(response) + # [END networkservices_v1_generated_DepService_ListAuthzExtensions_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_edge_extensions_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_edge_extensions_async.py new file mode 100644 index 000000000000..dcd10df2447d --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_edge_extensions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLbEdgeExtensions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_ListLbEdgeExtensions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_list_lb_edge_extensions(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + request = network_services_v1.ListLbEdgeExtensionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lb_edge_extensions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END networkservices_v1_generated_DepService_ListLbEdgeExtensions_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_edge_extensions_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_edge_extensions_sync.py new file mode 100644 index 000000000000..536f6e37f50a --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_edge_extensions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLbEdgeExtensions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_ListLbEdgeExtensions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_list_lb_edge_extensions(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + request = network_services_v1.ListLbEdgeExtensionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_lb_edge_extensions(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END networkservices_v1_generated_DepService_ListLbEdgeExtensions_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_async.py index 35d621f9c980..2faa491361cf 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_async.py @@ -50,4 +50,5 @@ async def sample_list_lb_route_extensions(): async for response in page_result: print(response) + # [END networkservices_v1_generated_DepService_ListLbRouteExtensions_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_sync.py index 8de1f1d4f339..f9878d20a3df 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_route_extensions_sync.py @@ -50,4 +50,5 @@ def sample_list_lb_route_extensions(): for response in page_result: print(response) + # [END networkservices_v1_generated_DepService_ListLbRouteExtensions_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_async.py index 990196dc2efa..54eba88aff90 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_async.py @@ -50,4 +50,5 @@ async def sample_list_lb_traffic_extensions(): async for response in page_result: print(response) + # [END networkservices_v1_generated_DepService_ListLbTrafficExtensions_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_sync.py index 1930c792ec6f..10f33caef68b 100644 --- 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_list_lb_traffic_extensions_sync.py @@ -50,4 +50,5 @@ def sample_list_lb_traffic_extensions(): for response in page_result: print(response) + # [END networkservices_v1_generated_DepService_ListLbTrafficExtensions_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_async.py index 29f6b82f8544..123f74412782 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_async.py @@ -59,4 +59,5 @@ async def sample_update_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_UpdateAuthzExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_sync.py index 4bd9ca275019..c48567bf8175 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_authz_extension_sync.py @@ -59,4 +59,5 @@ def sample_update_authz_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_UpdateAuthzExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_edge_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_edge_extension_async.py new file mode 100644 index 000000000000..92ae26434a7c --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_edge_extension_async.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_UpdateLbEdgeExtension_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +async def sample_update_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceAsyncClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.UpdateLbEdgeExtensionRequest( + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.update_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_UpdateLbEdgeExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_edge_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_edge_extension_sync.py new file mode 100644 index 000000000000..069c307e6a18 --- /dev/null +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_edge_extension_sync.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLbEdgeExtension +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-network-services + + +# [START networkservices_v1_generated_DepService_UpdateLbEdgeExtension_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import network_services_v1 + + +def sample_update_lb_edge_extension(): + # Create a client + client = network_services_v1.DepServiceClient() + + # Initialize request argument(s) + lb_edge_extension = network_services_v1.LbEdgeExtension() + lb_edge_extension.name = "name_value" + lb_edge_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] + lb_edge_extension.extension_chains.name = "name_value" + lb_edge_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) + lb_edge_extension.extension_chains.extensions.name = "name_value" + lb_edge_extension.extension_chains.extensions.service = "service_value" + lb_edge_extension.load_balancing_scheme = "EXTERNAL_MANAGED" + + request = network_services_v1.UpdateLbEdgeExtensionRequest( + lb_edge_extension=lb_edge_extension, + ) + + # Make the request + operation = client.update_lb_edge_extension(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END networkservices_v1_generated_DepService_UpdateLbEdgeExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_async.py index 027c323c6061..fca3b9618ea5 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_async.py @@ -41,9 +41,14 @@ async def sample_update_lb_route_extension(): # Initialize request argument(s) lb_route_extension = network_services_v1.LbRouteExtension() lb_route_extension.name = "name_value" - lb_route_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_route_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] lb_route_extension.extension_chains.name = "name_value" - lb_route_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_route_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_route_extension.extension_chains.extensions.name = "name_value" lb_route_extension.extension_chains.extensions.service = "service_value" lb_route_extension.load_balancing_scheme = "EXTERNAL_MANAGED" @@ -62,4 +67,5 @@ async def sample_update_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_UpdateLbRouteExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_sync.py index ba6be9b9f873..5270ad3be7ff 100644 --- 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_route_extension_sync.py @@ -41,9 +41,14 @@ def sample_update_lb_route_extension(): # Initialize request argument(s) lb_route_extension = network_services_v1.LbRouteExtension() lb_route_extension.name = "name_value" - lb_route_extension.forwarding_rules = ['forwarding_rules_value1', 'forwarding_rules_value2'] + lb_route_extension.forwarding_rules = [ + "forwarding_rules_value1", + "forwarding_rules_value2", + ] lb_route_extension.extension_chains.name = "name_value" - lb_route_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_route_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_route_extension.extension_chains.extensions.name = "name_value" lb_route_extension.extension_chains.extensions.service = "service_value" lb_route_extension.load_balancing_scheme = "EXTERNAL_MANAGED" @@ -62,4 +67,5 @@ def sample_update_lb_route_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_UpdateLbRouteExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_async.py index 0532c47379a5..b73ec3290809 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_async.py @@ -42,7 +42,9 @@ async def sample_update_lb_traffic_extension(): lb_traffic_extension = network_services_v1.LbTrafficExtension() lb_traffic_extension.name = "name_value" lb_traffic_extension.extension_chains.name = "name_value" - lb_traffic_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_traffic_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_traffic_extension.extension_chains.extensions.name = "name_value" lb_traffic_extension.extension_chains.extensions.service = "service_value" lb_traffic_extension.load_balancing_scheme = "EXTERNAL_MANAGED" @@ -61,4 +63,5 @@ async def sample_update_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_UpdateLbTrafficExtension_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_sync.py index 133091d8fac7..533af0431909 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_dep_service_update_lb_traffic_extension_sync.py @@ -42,7 +42,9 @@ def sample_update_lb_traffic_extension(): lb_traffic_extension = network_services_v1.LbTrafficExtension() lb_traffic_extension.name = 
"name_value" lb_traffic_extension.extension_chains.name = "name_value" - lb_traffic_extension.extension_chains.match_condition.cel_expression = "cel_expression_value" + lb_traffic_extension.extension_chains.match_condition.cel_expression = ( + "cel_expression_value" + ) lb_traffic_extension.extension_chains.extensions.name = "name_value" lb_traffic_extension.extension_chains.extensions.service = "service_value" lb_traffic_extension.load_balancing_scheme = "EXTERNAL_MANAGED" @@ -61,4 +63,5 @@ def sample_update_lb_traffic_extension(): # Handle the response print(response) + # [END networkservices_v1_generated_DepService_UpdateLbTrafficExtension_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_async.py index 01da94c110ee..23ec6e0ef8c2 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_async.py @@ -58,4 +58,5 @@ async def sample_create_endpoint_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_sync.py index 404eaea02693..8cc8c3bec489 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_endpoint_policy_sync.py @@ -58,4 +58,5 @@ def sample_create_endpoint_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_async.py index bb13d38ed93e..b4d63a46260a 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_async.py @@ -58,4 +58,5 @@ async def sample_create_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_sync.py index 7a8bf49157fb..561cd90adcab 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_sync.py +++ 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_gateway_sync.py @@ -58,4 +58,5 @@ def sample_create_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_async.py index 0fc829659036..945c7eaa6ed8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_async.py @@ -40,7 +40,7 @@ async def sample_create_grpc_route(): # Initialize request argument(s) grpc_route = network_services_v1.GrpcRoute() - grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + grpc_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.CreateGrpcRouteRequest( parent="parent_value", @@ -58,4 +58,5 @@ async def sample_create_grpc_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateGrpcRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_sync.py index fa46c739ef81..6e9c6296b112 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_grpc_route_sync.py @@ -40,7 +40,7 @@ def sample_create_grpc_route(): # Initialize request argument(s) grpc_route = network_services_v1.GrpcRoute() - grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + grpc_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.CreateGrpcRouteRequest( parent="parent_value", @@ -58,4 +58,5 @@ def sample_create_grpc_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_async.py index 52a0e4f942d7..0a84d0f8da3d 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_async.py @@ -40,7 +40,7 @@ async def sample_create_http_route(): # Initialize request argument(s) http_route = network_services_v1.HttpRoute() - http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + http_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.CreateHttpRouteRequest( parent="parent_value", @@ -58,4 +58,5 @@ async def 
sample_create_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateHttpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_sync.py index 4c0867d8e638..390eed6fdf27 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_http_route_sync.py @@ -40,7 +40,7 @@ def sample_create_http_route(): # Initialize request argument(s) http_route = network_services_v1.HttpRoute() - http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + http_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.CreateHttpRouteRequest( parent="parent_value", @@ -58,4 +58,5 @@ def sample_create_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateHttpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_async.py index 15a9ad45dc1f..2c7c57ac9529 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_async.py @@ -54,4 +54,5 @@ async def sample_create_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateMesh_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_sync.py index cee3553ef598..6192e323d732 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_mesh_sync.py @@ -54,4 +54,5 @@ def sample_create_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateMesh_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_async.py index 002028b71a63..af8a6ecc67ae 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_async.py @@ -54,4 +54,5 @@ async def sample_create_service_binding(): # Handle the response print(response) + # [END 
networkservices_v1_generated_NetworkServices_CreateServiceBinding_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_sync.py index e2b228742425..ce167651a36d 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_binding_sync.py @@ -54,4 +54,5 @@ def sample_create_service_binding(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateServiceBinding_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_async.py index c031753fc69c..61fbb83bc519 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_async.py @@ -54,4 +54,5 @@ async def sample_create_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateServiceLbPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_sync.py index ddc8a9c0d59b..f80c383cc70d 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_service_lb_policy_sync.py @@ -54,4 +54,5 @@ def sample_create_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateServiceLbPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_async.py index 147059417582..6742e8281fd7 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_async.py @@ -54,4 +54,5 @@ async def sample_create_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateTcpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_sync.py 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_sync.py index 2759850133e6..aaf77117a0d2 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tcp_route_sync.py @@ -54,4 +54,5 @@ def sample_create_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_async.py index 7ebef38d34b6..396eec2c1ff8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_async.py @@ -58,4 +58,5 @@ async def sample_create_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateTlsRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_sync.py index e1c02f4c187e..efb99d28cf2a 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_tls_route_sync.py @@ -58,4 +58,5 @@ def sample_create_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_async.py index 41e24afae32b..406ea8c91023 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_async.py @@ -54,4 +54,5 @@ async def sample_create_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateWasmPlugin_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_sync.py index 8c7f88e5be37..b88ae660bd79 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_sync.py +++ 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_sync.py @@ -54,4 +54,5 @@ def sample_create_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateWasmPlugin_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_async.py index deba2baab421..66ba0d2bf0bc 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_async.py @@ -40,7 +40,7 @@ async def sample_create_wasm_plugin_version(): # Initialize request argument(s) wasm_plugin_version = network_services_v1.WasmPluginVersion() - wasm_plugin_version.plugin_config_data = b'plugin_config_data_blob' + wasm_plugin_version.plugin_config_data = b"plugin_config_data_blob" request = network_services_v1.CreateWasmPluginVersionRequest( parent="parent_value", @@ -58,4 +58,5 @@ async def sample_create_wasm_plugin_version(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateWasmPluginVersion_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_sync.py index ef3760f22578..0dad441276eb 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_create_wasm_plugin_version_sync.py @@ -40,7 +40,7 @@ def sample_create_wasm_plugin_version(): # Initialize request argument(s) wasm_plugin_version = network_services_v1.WasmPluginVersion() - wasm_plugin_version.plugin_config_data = b'plugin_config_data_blob' + wasm_plugin_version.plugin_config_data = b"plugin_config_data_blob" request = network_services_v1.CreateWasmPluginVersionRequest( parent="parent_value", @@ -58,4 +58,5 @@ def sample_create_wasm_plugin_version(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_CreateWasmPluginVersion_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_async.py index 7794080424bb..d3b8da553913 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_async.py @@ -53,4 +53,5 @@ async def sample_delete_endpoint_policy(): # Handle the response print(response) + # [END 
networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py index 4fb0c7140094..2ce5a29cb792 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_endpoint_policy_sync.py @@ -53,4 +53,5 @@ def sample_delete_endpoint_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_async.py index 4e8e838bd714..eda5443e1584 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_async.py @@ -53,4 +53,5 @@ async def sample_delete_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_sync.py index 3dea531aa4d0..d3b87dcfb00b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_gateway_sync.py @@ -53,4 +53,5 @@ def sample_delete_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_async.py index c5404d18d764..449ccb7197ee 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_async.py @@ -53,4 +53,5 @@ async def sample_delete_grpc_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_sync.py index 72e0897771e9..c949ba9e5731 100644 --- 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_grpc_route_sync.py @@ -53,4 +53,5 @@ def sample_delete_grpc_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_async.py index dc7cae62c750..c809d9eeb4ba 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_async.py @@ -53,4 +53,5 @@ async def sample_delete_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteHttpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_sync.py index 370150cd3362..3b2d5e56d7d8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_http_route_sync.py @@ -53,4 +53,5 @@ def sample_delete_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteHttpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_async.py index a75da5001dfd..893235210bc8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_async.py @@ -53,4 +53,5 @@ async def sample_delete_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteMesh_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_sync.py index 47c439f0ef99..f44f22ecb548 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_mesh_sync.py @@ -53,4 +53,5 @@ def sample_delete_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteMesh_sync] diff --git 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_async.py index b013657438ad..4faf0c4b41d1 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_async.py @@ -53,4 +53,5 @@ async def sample_delete_service_binding(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteServiceBinding_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_sync.py index 825778a60fda..7b69d4fdd7c7 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_binding_sync.py @@ -53,4 +53,5 @@ def sample_delete_service_binding(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteServiceBinding_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_async.py index 4309e92c0621..656654c6c7f5 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_async.py @@ -53,4 +53,5 @@ async def sample_delete_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteServiceLbPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_sync.py index fe74f17f8b55..c56e31f3e70b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_service_lb_policy_sync.py @@ -53,4 +53,5 @@ def sample_delete_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteServiceLbPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_async.py 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_async.py index 624cac6bd326..75663a982fac 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_async.py @@ -53,4 +53,5 @@ async def sample_delete_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteTcpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_sync.py index 96fc1c5ebebf..fc50d086e78f 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tcp_route_sync.py @@ -53,4 +53,5 @@ def sample_delete_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_async.py index 269959fb6830..d06dd827872b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_async.py @@ -53,4 +53,5 @@ async def sample_delete_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteTlsRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_sync.py index 3192ef171172..477895d3ed01 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_tls_route_sync.py @@ -53,4 +53,5 @@ def sample_delete_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_async.py index ec2ca6d5c93a..fcd9773a6727 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_async.py +++ 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_async.py @@ -53,4 +53,5 @@ async def sample_delete_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteWasmPlugin_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_sync.py index 4f6a9dab00da..797eaee54d02 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_sync.py @@ -53,4 +53,5 @@ def sample_delete_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteWasmPlugin_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_async.py index fa30eb2c14d3..c7fb2176616d 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_async.py @@ -53,4 +53,5 @@ async def sample_delete_wasm_plugin_version(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteWasmPluginVersion_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_sync.py index fa5be3a9637b..38e77760383b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_delete_wasm_plugin_version_sync.py @@ -53,4 +53,5 @@ def sample_delete_wasm_plugin_version(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_DeleteWasmPluginVersion_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_async.py index be6eb5b7979a..8d58eacf51c5 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_async.py @@ -49,4 +49,5 @@ async def sample_get_endpoint_policy(): # Handle the response print(response) + # [END 
networkservices_v1_generated_NetworkServices_GetEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_sync.py index efaca2b409a4..d617e720d555 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_endpoint_policy_sync.py @@ -49,4 +49,5 @@ def sample_get_endpoint_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_async.py index 51d90526a9c7..2a4416b20262 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_async.py @@ -49,4 +49,5 @@ async def sample_get_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_async.py index 1d58d5ee1918..ab6c7ca81868 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_async.py @@ -49,4 +49,5 @@ async def sample_get_gateway_route_view(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetGatewayRouteView_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_sync.py index df2c6d800467..c31d490cf5c0 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_route_view_sync.py @@ -49,4 +49,5 @@ def sample_get_gateway_route_view(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetGatewayRouteView_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_sync.py index cd912a51aa45..b13437016680 
100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_gateway_sync.py @@ -49,4 +49,5 @@ def sample_get_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_async.py index 9097182cb1c4..dfa4c5487ac0 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_async.py @@ -49,4 +49,5 @@ async def sample_get_grpc_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetGrpcRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_sync.py index ad2ad2649af3..d6659689e069 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_grpc_route_sync.py @@ -49,4 +49,5 @@ def sample_get_grpc_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_async.py index ec715349968d..42495d94670d 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_async.py @@ -49,4 +49,5 @@ async def sample_get_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetHttpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_sync.py index b69d63503340..23efc7617f05 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_http_route_sync.py @@ -49,4 +49,5 @@ def sample_get_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetHttpRoute_sync] diff --git 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_async.py index 1d72b6437e66..0c9a6b680da3 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_async.py @@ -49,4 +49,5 @@ async def sample_get_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetMesh_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_async.py index cecd9211fa28..a1066fd21efd 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_async.py @@ -49,4 +49,5 @@ async def sample_get_mesh_route_view(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetMeshRouteView_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_sync.py index 7ef5777f0a9b..51a1f8187102 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_route_view_sync.py @@ -49,4 +49,5 @@ def sample_get_mesh_route_view(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetMeshRouteView_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_sync.py index 1f15f223cbbf..1d443967d79b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_mesh_sync.py @@ -49,4 +49,5 @@ def sample_get_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetMesh_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_async.py index ac2598d3c852..fe43560ae32e 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_async.py +++ 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_async.py @@ -49,4 +49,5 @@ async def sample_get_service_binding(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetServiceBinding_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_sync.py index 225352870e9d..1dc3d8027079 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_binding_sync.py @@ -49,4 +49,5 @@ def sample_get_service_binding(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetServiceBinding_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_async.py index 665920f7e3da..a2e678e04969 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_async.py @@ -49,4 +49,5 @@ async def sample_get_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetServiceLbPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_sync.py index c92afc53ea90..f840c29db438 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_service_lb_policy_sync.py @@ -49,4 +49,5 @@ def sample_get_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetServiceLbPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_async.py index 60e938636bdf..3845eadb5fc7 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_async.py @@ -49,4 +49,5 @@ async def sample_get_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetTcpRoute_async] diff --git 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_sync.py index 49ba9b5c02e9..de26dd6d5038 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tcp_route_sync.py @@ -49,4 +49,5 @@ def sample_get_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_async.py index 47ecc3371b9c..10c0b29e6fc8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_async.py @@ -49,4 +49,5 @@ async def sample_get_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetTlsRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_sync.py index d7a427398f6c..1bd609dd942b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_tls_route_sync.py @@ -49,4 +49,5 @@ def sample_get_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_async.py index 1c0d7b0e22ba..d3e3ecdadcba 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_async.py @@ -49,4 +49,5 @@ async def sample_get_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetWasmPlugin_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_sync.py index 9981d0511591..a9a747cf630e 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_sync.py +++ 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_sync.py @@ -49,4 +49,5 @@ def sample_get_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetWasmPlugin_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_async.py index 761e96437918..07fe46c5b0af 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_async.py @@ -49,4 +49,5 @@ async def sample_get_wasm_plugin_version(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetWasmPluginVersion_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_sync.py index 7444b7420885..4cffb2dc4447 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_get_wasm_plugin_version_sync.py @@ -49,4 +49,5 @@ def sample_get_wasm_plugin_version(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_GetWasmPluginVersion_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_async.py index 3e2a399552ff..2a4fa5e9268f 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_async.py @@ -50,4 +50,5 @@ async def sample_list_endpoint_policies(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListEndpointPolicies_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_sync.py index e5db66a7af1a..55a9c54e864b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_endpoint_policies_sync.py @@ -50,4 +50,5 @@ def sample_list_endpoint_policies(): for response in page_result: print(response) + # [END 
networkservices_v1_generated_NetworkServices_ListEndpointPolicies_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_async.py index 151f110ccd70..0c8afca025dd 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_async.py @@ -50,4 +50,5 @@ async def sample_list_gateway_route_views(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListGatewayRouteViews_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_sync.py index 56d02f916a57..73ce6f712ae2 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateway_route_views_sync.py @@ -50,4 +50,5 @@ def sample_list_gateway_route_views(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListGatewayRouteViews_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_async.py index 2b13d0ca70fc..e3d87466387f 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_async.py @@ -50,4 +50,5 @@ async def sample_list_gateways(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListGateways_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_sync.py index ae7b1320f390..80c57478cabd 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_gateways_sync.py @@ -50,4 +50,5 @@ def sample_list_gateways(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListGateways_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_async.py 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_async.py index 020910a2ffb1..c44ca0cf53a0 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_async.py @@ -50,4 +50,5 @@ async def sample_list_grpc_routes(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListGrpcRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_sync.py index 84e99a4e4198..3b52cc308e52 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_grpc_routes_sync.py @@ -50,4 +50,5 @@ def sample_list_grpc_routes(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListGrpcRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_async.py index ec1a4cd8b5f1..761334dbef6f 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_async.py @@ -50,4 +50,5 @@ async def sample_list_http_routes(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListHttpRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_sync.py index 5effdc2a0fb6..5cc03581bddf 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_http_routes_sync.py @@ -50,4 +50,5 @@ def sample_list_http_routes(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListHttpRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_async.py index 9def810b3572..bed3427cf4b0 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_async.py +++ 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_async.py @@ -50,4 +50,5 @@ async def sample_list_mesh_route_views(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListMeshRouteViews_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_sync.py index 9f770b6deaa2..35376719c09a 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_mesh_route_views_sync.py @@ -50,4 +50,5 @@ def sample_list_mesh_route_views(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListMeshRouteViews_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_async.py index acf1ca4e292d..65869c46a88f 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_async.py @@ -50,4 +50,5 @@ async def sample_list_meshes(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListMeshes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_sync.py index 3e2682d000c2..5784e80ff63c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_meshes_sync.py @@ -50,4 +50,5 @@ def sample_list_meshes(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListMeshes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_async.py index 3b3b6af5dec4..3a12582238d1 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_async.py @@ -50,4 +50,5 @@ async def sample_list_service_bindings(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListServiceBindings_async] diff --git 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_sync.py index e89eaa477188..7eafdddf28fb 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_bindings_sync.py @@ -50,4 +50,5 @@ def sample_list_service_bindings(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListServiceBindings_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_async.py index 4219c9eeaae9..e0cbfb52b86c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_async.py @@ -50,4 +50,5 @@ async def sample_list_service_lb_policies(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListServiceLbPolicies_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_sync.py index bb62e7c8641f..b72213225e21 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_service_lb_policies_sync.py @@ -50,4 +50,5 @@ def sample_list_service_lb_policies(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListServiceLbPolicies_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_async.py index 06484ad73d56..4c5b8f6f60bb 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_async.py @@ -50,4 +50,5 @@ async def sample_list_tcp_routes(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListTcpRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_sync.py index 
f5c9ea9f9df3..a5e1330b0c8a 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tcp_routes_sync.py @@ -50,4 +50,5 @@ def sample_list_tcp_routes(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListTcpRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_async.py index baad957f9df0..52fad69821b0 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_async.py @@ -50,4 +50,5 @@ async def sample_list_tls_routes(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListTlsRoutes_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_sync.py index e3a7378615c6..d034ed6a8f08 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_tls_routes_sync.py @@ -50,4 +50,5 @@ def sample_list_tls_routes(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListTlsRoutes_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_async.py index 74bcde8873b4..0caa8a0362f8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_async.py @@ -50,4 +50,5 @@ async def sample_list_wasm_plugin_versions(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListWasmPluginVersions_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_sync.py index da65b83d01dc..a4f9997fe456 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugin_versions_sync.py @@ -50,4 
+50,5 @@ def sample_list_wasm_plugin_versions(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListWasmPluginVersions_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_async.py index 7aa3727e2439..347f308a784c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_async.py @@ -50,4 +50,5 @@ async def sample_list_wasm_plugins(): async for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListWasmPlugins_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_sync.py index 0884762c6a98..5a697fdaec6b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_list_wasm_plugins_sync.py @@ -50,4 +50,5 @@ def sample_list_wasm_plugins(): for response in page_result: print(response) + # [END networkservices_v1_generated_NetworkServices_ListWasmPlugins_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_async.py index afd0b6611cb4..a5ef4599d2d8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_async.py @@ -56,4 +56,5 @@ async def sample_update_endpoint_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_sync.py index ee9f7e6f077b..472c89849909 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_endpoint_policy_sync.py @@ -56,4 +56,5 @@ def sample_update_endpoint_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateEndpointPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_async.py 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_async.py index ab97a11a534f..724ee366cdd0 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_async.py @@ -56,4 +56,5 @@ async def sample_update_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateGateway_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_sync.py index b469bce17232..01bd31ddfcef 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_gateway_sync.py @@ -56,4 +56,5 @@ def sample_update_gateway(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateGateway_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_async.py index 2207dc5c8f88..26c296dce653 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_async.py @@ -40,7 +40,7 @@ async def sample_update_grpc_route(): # Initialize request argument(s) grpc_route = network_services_v1.GrpcRoute() - grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + grpc_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.UpdateGrpcRouteRequest( grpc_route=grpc_route, @@ -56,4 +56,5 @@ async def sample_update_grpc_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_sync.py index 1303dc68a40d..8b9d2c5d995d 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_grpc_route_sync.py @@ -40,7 +40,7 @@ def sample_update_grpc_route(): # Initialize request argument(s) grpc_route = network_services_v1.GrpcRoute() - grpc_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + grpc_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.UpdateGrpcRouteRequest( grpc_route=grpc_route, @@ -56,4 +56,5 @@ def sample_update_grpc_route(): # Handle the response 
print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateGrpcRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_async.py index 8f36fc791da8..b144fa17be4c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_async.py @@ -40,7 +40,7 @@ async def sample_update_http_route(): # Initialize request argument(s) http_route = network_services_v1.HttpRoute() - http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + http_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.UpdateHttpRouteRequest( http_route=http_route, @@ -56,4 +56,5 @@ async def sample_update_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateHttpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_sync.py index b2ae41098d74..9a9fe7466721 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_http_route_sync.py @@ -40,7 +40,7 @@ def sample_update_http_route(): # Initialize request argument(s) http_route = network_services_v1.HttpRoute() - http_route.hostnames = ['hostnames_value1', 'hostnames_value2'] + http_route.hostnames = ["hostnames_value1", "hostnames_value2"] request = network_services_v1.UpdateHttpRouteRequest( http_route=http_route, @@ -56,4 +56,5 @@ def sample_update_http_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateHttpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_async.py index bd38595adf8b..76aa54f8b557 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_async.py @@ -39,8 +39,7 @@ async def sample_update_mesh(): client = network_services_v1.NetworkServicesAsyncClient() # Initialize request argument(s) - request = network_services_v1.UpdateMeshRequest( - ) + request = network_services_v1.UpdateMeshRequest() # Make the request operation = client.update_mesh(request=request) @@ -52,4 +51,5 @@ async def sample_update_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateMesh_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_sync.py 
b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_sync.py index 28bc5c9f4ad3..8edd1b8461c8 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_mesh_sync.py @@ -39,8 +39,7 @@ def sample_update_mesh(): client = network_services_v1.NetworkServicesClient() # Initialize request argument(s) - request = network_services_v1.UpdateMeshRequest( - ) + request = network_services_v1.UpdateMeshRequest() # Make the request operation = client.update_mesh(request=request) @@ -52,4 +51,5 @@ def sample_update_mesh(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateMesh_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_async.py index eaae070aadf8..b710e346cba4 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_async.py @@ -39,8 +39,7 @@ async def sample_update_service_binding(): client = network_services_v1.NetworkServicesAsyncClient() # Initialize request argument(s) - request = network_services_v1.UpdateServiceBindingRequest( - ) + request = network_services_v1.UpdateServiceBindingRequest() # Make the request operation = client.update_service_binding(request=request) @@ -52,4 +51,5 @@ async def sample_update_service_binding(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateServiceBinding_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_sync.py index 9d7331377b5e..ce9373e6ce85 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_binding_sync.py @@ -39,8 +39,7 @@ def sample_update_service_binding(): client = network_services_v1.NetworkServicesClient() # Initialize request argument(s) - request = network_services_v1.UpdateServiceBindingRequest( - ) + request = network_services_v1.UpdateServiceBindingRequest() # Make the request operation = client.update_service_binding(request=request) @@ -52,4 +51,5 @@ def sample_update_service_binding(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateServiceBinding_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_async.py index 
db6d028ae7f3..e6b6778e48d1 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_async.py @@ -39,8 +39,7 @@ async def sample_update_service_lb_policy(): client = network_services_v1.NetworkServicesAsyncClient() # Initialize request argument(s) - request = network_services_v1.UpdateServiceLbPolicyRequest( - ) + request = network_services_v1.UpdateServiceLbPolicyRequest() # Make the request operation = client.update_service_lb_policy(request=request) @@ -52,4 +51,5 @@ async def sample_update_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateServiceLbPolicy_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_sync.py index 32680b1b8e93..66bb8c44bc3d 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_service_lb_policy_sync.py @@ -39,8 +39,7 @@ def sample_update_service_lb_policy(): client = network_services_v1.NetworkServicesClient() # Initialize request argument(s) - request = network_services_v1.UpdateServiceLbPolicyRequest( - ) + request = network_services_v1.UpdateServiceLbPolicyRequest() # Make the request operation = client.update_service_lb_policy(request=request) @@ -52,4 +51,5 @@ def sample_update_service_lb_policy(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateServiceLbPolicy_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_async.py index c8f471442802..4e75156ac4b7 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_async.py @@ -39,8 +39,7 @@ async def sample_update_tcp_route(): client = network_services_v1.NetworkServicesAsyncClient() # Initialize request argument(s) - request = network_services_v1.UpdateTcpRouteRequest( - ) + request = network_services_v1.UpdateTcpRouteRequest() # Make the request operation = client.update_tcp_route(request=request) @@ -52,4 +51,5 @@ async def sample_update_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateTcpRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_sync.py index d7cc6972fd0e..c461998a5a52 100644 --- 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tcp_route_sync.py @@ -39,8 +39,7 @@ def sample_update_tcp_route(): client = network_services_v1.NetworkServicesClient() # Initialize request argument(s) - request = network_services_v1.UpdateTcpRouteRequest( - ) + request = network_services_v1.UpdateTcpRouteRequest() # Make the request operation = client.update_tcp_route(request=request) @@ -52,4 +51,5 @@ def sample_update_tcp_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateTcpRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_async.py index 4671bc73944e..6940a8f75c3b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_async.py @@ -56,4 +56,5 @@ async def sample_update_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateTlsRoute_async] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_sync.py index b069e870e5c9..f23792b10c7b 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_tls_route_sync.py @@ -56,4 +56,5 @@ def sample_update_tls_route(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateTlsRoute_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_async.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_async.py index b70165ad552e..189981702442 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_async.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_async.py @@ -39,8 +39,7 @@ async def sample_update_wasm_plugin(): client = network_services_v1.NetworkServicesAsyncClient() # Initialize request argument(s) - request = network_services_v1.UpdateWasmPluginRequest( - ) + request = network_services_v1.UpdateWasmPluginRequest() # Make the request operation = client.update_wasm_plugin(request=request) @@ -52,4 +51,5 @@ async def sample_update_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateWasmPlugin_async] diff --git 
a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_sync.py b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_sync.py index 86831b2ed12d..32baea01483c 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_sync.py +++ b/packages/google-cloud-network-services/samples/generated_samples/networkservices_v1_generated_network_services_update_wasm_plugin_sync.py @@ -39,8 +39,7 @@ def sample_update_wasm_plugin(): client = network_services_v1.NetworkServicesClient() # Initialize request argument(s) - request = network_services_v1.UpdateWasmPluginRequest( - ) + request = network_services_v1.UpdateWasmPluginRequest() # Make the request operation = client.update_wasm_plugin(request=request) @@ -52,4 +51,5 @@ def sample_update_wasm_plugin(): # Handle the response print(response) + # [END networkservices_v1_generated_NetworkServices_UpdateWasmPlugin_sync] diff --git a/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json index 6c01fe290d6b..66d0274522af 100644 --- a/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json +++ b/packages/google-cloud-network-services/samples/generated_samples/snippet_metadata_google.cloud.networkservices.v1.json @@ -188,6 +188,183 @@ ], "title": "networkservices_v1_generated_dep_service_create_authz_extension_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", + "shortName": "DepServiceAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.create_lb_edge_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.CreateLbEdgeExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "CreateLbEdgeExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateLbEdgeExtensionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lb_edge_extension", + "type": "google.cloud.network_services_v1.types.LbEdgeExtension" + }, + { + "name": "lb_edge_extension_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_lb_edge_extension" + }, + "description": "Sample for CreateLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_create_lb_edge_extension_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_CreateLbEdgeExtension_async", + "segments": [ + { + "end": 66, + "start": 27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 56, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 63, + "start": 57, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 64, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_create_lb_edge_extension_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceClient", + "shortName": "DepServiceClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceClient.create_lb_edge_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.CreateLbEdgeExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "CreateLbEdgeExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.CreateLbEdgeExtensionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "lb_edge_extension", + "type": "google.cloud.network_services_v1.types.LbEdgeExtension" + }, + { + "name": "lb_edge_extension_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_lb_edge_extension" + }, + "description": "Sample for CreateLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_create_lb_edge_extension_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_CreateLbEdgeExtension_sync", + "segments": [ + { + "end": 66, + "start": 27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 56, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 63, + "start": 57, + "type": "REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 64, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_create_lb_edge_extension_sync.py" + }, { "canonical": true, "clientMethod": { @@ -711,19 +888,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", "shortName": "DepServiceAsyncClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.delete_lb_route_extension", + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.delete_lb_edge_extension", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbRouteExtension", + "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbEdgeExtension", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "DeleteLbRouteExtension" + "shortName": "DeleteLbEdgeExtension" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.DeleteLbRouteExtensionRequest" + "type": "google.cloud.network_services_v1.types.DeleteLbEdgeExtensionRequest" }, { "name": "name", @@ -743,13 +920,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_lb_route_extension" + "shortName": "delete_lb_edge_extension" }, - "description": "Sample for DeleteLbRouteExtension", - "file": "networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py", + "description": "Sample for DeleteLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_delete_lb_edge_extension_async.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_DeleteLbRouteExtension_async", + "regionTag": "networkservices_v1_generated_DepService_DeleteLbEdgeExtension_async", "segments": [ { "end": 55, @@ -782,7 +959,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py" + "title": "networkservices_v1_generated_dep_service_delete_lb_edge_extension_async.py" }, { "canonical": true, @@ -791,19 +968,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceClient", "shortName": "DepServiceClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceClient.delete_lb_route_extension", + "fullName": "google.cloud.network_services_v1.DepServiceClient.delete_lb_edge_extension", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbRouteExtension", + "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbEdgeExtension", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "DeleteLbRouteExtension" + "shortName": "DeleteLbEdgeExtension" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.DeleteLbRouteExtensionRequest" + "type": "google.cloud.network_services_v1.types.DeleteLbEdgeExtensionRequest" }, { "name": "name", @@ -823,13 +1000,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_lb_route_extension" + "shortName": "delete_lb_edge_extension" }, - "description": "Sample for DeleteLbRouteExtension", - "file": "networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py", + "description": "Sample for DeleteLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_delete_lb_edge_extension_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_DeleteLbRouteExtension_sync", + "regionTag": "networkservices_v1_generated_DepService_DeleteLbEdgeExtension_sync", "segments": [ { "end": 55, @@ -862,7 +1039,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py" + "title": "networkservices_v1_generated_dep_service_delete_lb_edge_extension_sync.py" }, { "canonical": true, @@ -872,19 +1049,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", "shortName": "DepServiceAsyncClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.delete_lb_traffic_extension", + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.delete_lb_route_extension", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbTrafficExtension", + "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbRouteExtension", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "DeleteLbTrafficExtension" + "shortName": "DeleteLbRouteExtension" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.DeleteLbTrafficExtensionRequest" + "type": "google.cloud.network_services_v1.types.DeleteLbRouteExtensionRequest" }, { "name": "name", @@ -904,13 +1081,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_lb_traffic_extension" + "shortName": "delete_lb_route_extension" }, - "description": "Sample for DeleteLbTrafficExtension", - "file": 
"networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py", + "description": "Sample for DeleteLbRouteExtension", + "file": "networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_DeleteLbTrafficExtension_async", + "regionTag": "networkservices_v1_generated_DepService_DeleteLbRouteExtension_async", "segments": [ { "end": 55, @@ -943,7 +1120,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py" + "title": "networkservices_v1_generated_dep_service_delete_lb_route_extension_async.py" }, { "canonical": true, @@ -952,19 +1129,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceClient", "shortName": "DepServiceClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceClient.delete_lb_traffic_extension", + "fullName": "google.cloud.network_services_v1.DepServiceClient.delete_lb_route_extension", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbTrafficExtension", + "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbRouteExtension", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "DeleteLbTrafficExtension" + "shortName": "DeleteLbRouteExtension" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.DeleteLbTrafficExtensionRequest" + "type": "google.cloud.network_services_v1.types.DeleteLbRouteExtensionRequest" }, { "name": "name", @@ -984,13 +1161,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_lb_traffic_extension" + "shortName": "delete_lb_route_extension" }, - "description": "Sample for DeleteLbTrafficExtension", - "file": "networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py", + "description": "Sample for DeleteLbRouteExtension", + "file": "networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_DeleteLbTrafficExtension_sync", + "regionTag": "networkservices_v1_generated_DepService_DeleteLbRouteExtension_sync", "segments": [ { "end": 55, @@ -1023,7 +1200,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py" + "title": "networkservices_v1_generated_dep_service_delete_lb_route_extension_sync.py" }, { "canonical": true, @@ -1033,19 +1210,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", "shortName": "DepServiceAsyncClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.get_authz_extension", + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.delete_lb_traffic_extension", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.GetAuthzExtension", + "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbTrafficExtension", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "GetAuthzExtension" + "shortName": "DeleteLbTrafficExtension" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.GetAuthzExtensionRequest" + "type": "google.cloud.network_services_v1.types.DeleteLbTrafficExtensionRequest" }, { "name": "name", @@ -1064,22 +1241,22 @@ "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.types.AuthzExtension", - "shortName": "get_authz_extension" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_lb_traffic_extension" }, - "description": "Sample for GetAuthzExtension", - "file": "networkservices_v1_generated_dep_service_get_authz_extension_async.py", + "description": "Sample for DeleteLbTrafficExtension", + "file": "networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_GetAuthzExtension_async", + "regionTag": "networkservices_v1_generated_DepService_DeleteLbTrafficExtension_async", "segments": [ { - "end": 51, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1094,17 +1271,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_get_authz_extension_async.py" + "title": "networkservices_v1_generated_dep_service_delete_lb_traffic_extension_async.py" }, { "canonical": true, @@ -1113,19 +1290,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceClient", "shortName": "DepServiceClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceClient.get_authz_extension", + "fullName": "google.cloud.network_services_v1.DepServiceClient.delete_lb_traffic_extension", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.GetAuthzExtension", + "fullName": "google.cloud.networkservices.v1.DepService.DeleteLbTrafficExtension", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "GetAuthzExtension" + "shortName": "DeleteLbTrafficExtension" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.GetAuthzExtensionRequest" + "type": "google.cloud.network_services_v1.types.DeleteLbTrafficExtensionRequest" }, { "name": "name", @@ -1144,17 +1321,178 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.types.AuthzExtension", - "shortName": "get_authz_extension" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_lb_traffic_extension" }, - "description": "Sample for GetAuthzExtension", - "file": "networkservices_v1_generated_dep_service_get_authz_extension_sync.py", + "description": "Sample for DeleteLbTrafficExtension", + "file": "networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_GetAuthzExtension_sync", + "regionTag": "networkservices_v1_generated_DepService_DeleteLbTrafficExtension_sync", "segments": [ { - "end": 51, + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_delete_lb_traffic_extension_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": 
{ + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", + "shortName": "DepServiceAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.get_authz_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.GetAuthzExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "GetAuthzExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetAuthzExtensionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_services_v1.types.AuthzExtension", + "shortName": "get_authz_extension" + }, + "description": "Sample for GetAuthzExtension", + "file": "networkservices_v1_generated_dep_service_get_authz_extension_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_GetAuthzExtension_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_get_authz_extension_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceClient", + "shortName": "DepServiceClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceClient.get_authz_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.GetAuthzExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "GetAuthzExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetAuthzExtensionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_services_v1.types.AuthzExtension", + "shortName": "get_authz_extension" + }, + "description": "Sample for GetAuthzExtension", + "file": "networkservices_v1_generated_dep_service_get_authz_extension_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_GetAuthzExtension_sync", + "segments": [ + { + "end": 51, "start": 27, "type": "FULL" }, @@ -1186,6 +1524,167 @@ ], "title": "networkservices_v1_generated_dep_service_get_authz_extension_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", + "shortName": "DepServiceAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.get_lb_edge_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.GetLbEdgeExtension", + "service": { + "fullName": 
"google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "GetLbEdgeExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetLbEdgeExtensionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_services_v1.types.LbEdgeExtension", + "shortName": "get_lb_edge_extension" + }, + "description": "Sample for GetLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_get_lb_edge_extension_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_GetLbEdgeExtension_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_get_lb_edge_extension_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceClient", + "shortName": "DepServiceClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceClient.get_lb_edge_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.GetLbEdgeExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "GetLbEdgeExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetLbEdgeExtensionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_services_v1.types.LbEdgeExtension", + "shortName": "get_lb_edge_extension" + }, + "description": "Sample for GetLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_get_lb_edge_extension_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_GetLbEdgeExtension_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_get_lb_edge_extension_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1225,14 +1724,175 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.types.LbRouteExtension", - "shortName": "get_lb_route_extension" + "resultType": "google.cloud.network_services_v1.types.LbRouteExtension", + "shortName": "get_lb_route_extension" + }, + "description": "Sample for GetLbRouteExtension", + "file": 
"networkservices_v1_generated_dep_service_get_lb_route_extension_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_GetLbRouteExtension_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_get_lb_route_extension_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceClient", + "shortName": "DepServiceClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceClient.get_lb_route_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.GetLbRouteExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "GetLbRouteExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetLbRouteExtensionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_services_v1.types.LbRouteExtension", + "shortName": "get_lb_route_extension" + }, + "description": "Sample for GetLbRouteExtension", + "file": "networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_GetLbRouteExtension_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", + "shortName": "DepServiceAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.get_lb_traffic_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.GetLbTrafficExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "GetLbTrafficExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.GetLbTrafficExtensionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.network_services_v1.types.LbTrafficExtension", + "shortName": "get_lb_traffic_extension" }, - "description": "Sample for GetLbRouteExtension", 
- "file": "networkservices_v1_generated_dep_service_get_lb_route_extension_async.py", + "description": "Sample for GetLbTrafficExtension", + "file": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_GetLbRouteExtension_async", + "regionTag": "networkservices_v1_generated_DepService_GetLbTrafficExtension_async", "segments": [ { "end": 51, @@ -1265,7 +1925,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_get_lb_route_extension_async.py" + "title": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py" }, { "canonical": true, @@ -1274,19 +1934,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceClient", "shortName": "DepServiceClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceClient.get_lb_route_extension", + "fullName": "google.cloud.network_services_v1.DepServiceClient.get_lb_traffic_extension", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.GetLbRouteExtension", + "fullName": "google.cloud.networkservices.v1.DepService.GetLbTrafficExtension", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "GetLbRouteExtension" + "shortName": "GetLbTrafficExtension" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.GetLbRouteExtensionRequest" + "type": "google.cloud.network_services_v1.types.GetLbTrafficExtensionRequest" }, { "name": "name", @@ -1305,14 +1965,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.types.LbRouteExtension", - "shortName": "get_lb_route_extension" + "resultType": "google.cloud.network_services_v1.types.LbTrafficExtension", + "shortName": "get_lb_traffic_extension" }, - "description": "Sample for GetLbRouteExtension", - "file": "networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py", + "description": "Sample for GetLbTrafficExtension", + "file": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_GetLbRouteExtension_sync", + "regionTag": "networkservices_v1_generated_DepService_GetLbTrafficExtension_sync", "segments": [ { "end": 51, @@ -1345,7 +2005,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_get_lb_route_extension_sync.py" + "title": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py" }, { "canonical": true, @@ -1355,22 +2015,22 @@ "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", "shortName": "DepServiceAsyncClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.get_lb_traffic_extension", + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.list_authz_extensions", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.GetLbTrafficExtension", + "fullName": "google.cloud.networkservices.v1.DepService.ListAuthzExtensions", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "GetLbTrafficExtension" + "shortName": "ListAuthzExtensions" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.GetLbTrafficExtensionRequest" + "type": 
"google.cloud.network_services_v1.types.ListAuthzExtensionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1386,22 +2046,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.types.LbTrafficExtension", - "shortName": "get_lb_traffic_extension" + "resultType": "google.cloud.network_services_v1.services.dep_service.pagers.ListAuthzExtensionsAsyncPager", + "shortName": "list_authz_extensions" }, - "description": "Sample for GetLbTrafficExtension", - "file": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py", + "description": "Sample for ListAuthzExtensions", + "file": "networkservices_v1_generated_dep_service_list_authz_extensions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_GetLbTrafficExtension_async", + "regionTag": "networkservices_v1_generated_DepService_ListAuthzExtensions_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1421,12 +2081,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_async.py" + "title": "networkservices_v1_generated_dep_service_list_authz_extensions_async.py" }, { "canonical": true, @@ -1435,22 +2095,22 @@ "fullName": "google.cloud.network_services_v1.DepServiceClient", "shortName": "DepServiceClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceClient.get_lb_traffic_extension", + "fullName": "google.cloud.network_services_v1.DepServiceClient.list_authz_extensions", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.GetLbTrafficExtension", + "fullName": "google.cloud.networkservices.v1.DepService.ListAuthzExtensions", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "GetLbTrafficExtension" + "shortName": "ListAuthzExtensions" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.GetLbTrafficExtensionRequest" + "type": "google.cloud.network_services_v1.types.ListAuthzExtensionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1466,22 +2126,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.types.LbTrafficExtension", - "shortName": "get_lb_traffic_extension" + "resultType": "google.cloud.network_services_v1.services.dep_service.pagers.ListAuthzExtensionsPager", + "shortName": "list_authz_extensions" }, - "description": "Sample for GetLbTrafficExtension", - "file": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py", + "description": "Sample for ListAuthzExtensions", + "file": "networkservices_v1_generated_dep_service_list_authz_extensions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_GetLbTrafficExtension_sync", + "regionTag": "networkservices_v1_generated_DepService_ListAuthzExtensions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1501,12 +2161,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_get_lb_traffic_extension_sync.py" + "title": 
"networkservices_v1_generated_dep_service_list_authz_extensions_sync.py" }, { "canonical": true, @@ -1516,19 +2176,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", "shortName": "DepServiceAsyncClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.list_authz_extensions", + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.list_lb_edge_extensions", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.ListAuthzExtensions", + "fullName": "google.cloud.networkservices.v1.DepService.ListLbEdgeExtensions", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "ListAuthzExtensions" + "shortName": "ListLbEdgeExtensions" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.ListAuthzExtensionsRequest" + "type": "google.cloud.network_services_v1.types.ListLbEdgeExtensionsRequest" }, { "name": "parent", @@ -1547,14 +2207,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.services.dep_service.pagers.ListAuthzExtensionsAsyncPager", - "shortName": "list_authz_extensions" + "resultType": "google.cloud.network_services_v1.services.dep_service.pagers.ListLbEdgeExtensionsAsyncPager", + "shortName": "list_lb_edge_extensions" }, - "description": "Sample for ListAuthzExtensions", - "file": "networkservices_v1_generated_dep_service_list_authz_extensions_async.py", + "description": "Sample for ListLbEdgeExtensions", + "file": "networkservices_v1_generated_dep_service_list_lb_edge_extensions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_ListAuthzExtensions_async", + "regionTag": "networkservices_v1_generated_DepService_ListLbEdgeExtensions_async", "segments": [ { "end": 52, @@ -1587,7 +2247,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_list_authz_extensions_async.py" + "title": "networkservices_v1_generated_dep_service_list_lb_edge_extensions_async.py" }, { "canonical": true, @@ -1596,19 +2256,19 @@ "fullName": "google.cloud.network_services_v1.DepServiceClient", "shortName": "DepServiceClient" }, - "fullName": "google.cloud.network_services_v1.DepServiceClient.list_authz_extensions", + "fullName": "google.cloud.network_services_v1.DepServiceClient.list_lb_edge_extensions", "method": { - "fullName": "google.cloud.networkservices.v1.DepService.ListAuthzExtensions", + "fullName": "google.cloud.networkservices.v1.DepService.ListLbEdgeExtensions", "service": { "fullName": "google.cloud.networkservices.v1.DepService", "shortName": "DepService" }, - "shortName": "ListAuthzExtensions" + "shortName": "ListLbEdgeExtensions" }, "parameters": [ { "name": "request", - "type": "google.cloud.network_services_v1.types.ListAuthzExtensionsRequest" + "type": "google.cloud.network_services_v1.types.ListLbEdgeExtensionsRequest" }, { "name": "parent", @@ -1627,14 +2287,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.network_services_v1.services.dep_service.pagers.ListAuthzExtensionsPager", - "shortName": "list_authz_extensions" + "resultType": "google.cloud.network_services_v1.services.dep_service.pagers.ListLbEdgeExtensionsPager", + "shortName": "list_lb_edge_extensions" }, - "description": "Sample for ListAuthzExtensions", - "file": "networkservices_v1_generated_dep_service_list_authz_extensions_sync.py", + "description": "Sample for 
ListLbEdgeExtensions", + "file": "networkservices_v1_generated_dep_service_list_lb_edge_extensions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "networkservices_v1_generated_DepService_ListAuthzExtensions_sync", + "regionTag": "networkservices_v1_generated_DepService_ListLbEdgeExtensions_sync", "segments": [ { "end": 52, @@ -1667,7 +2327,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "networkservices_v1_generated_dep_service_list_authz_extensions_sync.py" + "title": "networkservices_v1_generated_dep_service_list_lb_edge_extensions_sync.py" }, { "canonical": true, @@ -2160,6 +2820,175 @@ ], "title": "networkservices_v1_generated_dep_service_update_authz_extension_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient", + "shortName": "DepServiceAsyncClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceAsyncClient.update_lb_edge_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.UpdateLbEdgeExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "UpdateLbEdgeExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateLbEdgeExtensionRequest" + }, + { + "name": "lb_edge_extension", + "type": "google.cloud.network_services_v1.types.LbEdgeExtension" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_lb_edge_extension" + }, + "description": "Sample for UpdateLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_update_lb_edge_extension_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_UpdateLbEdgeExtension_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_update_lb_edge_extension_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.network_services_v1.DepServiceClient", + "shortName": "DepServiceClient" + }, + "fullName": "google.cloud.network_services_v1.DepServiceClient.update_lb_edge_extension", + "method": { + "fullName": "google.cloud.networkservices.v1.DepService.UpdateLbEdgeExtension", + "service": { + "fullName": "google.cloud.networkservices.v1.DepService", + "shortName": "DepService" + }, + "shortName": "UpdateLbEdgeExtension" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.network_services_v1.types.UpdateLbEdgeExtensionRequest" + }, + { + "name": "lb_edge_extension", + "type": "google.cloud.network_services_v1.types.LbEdgeExtension" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_lb_edge_extension" + }, + "description": "Sample for UpdateLbEdgeExtension", + "file": "networkservices_v1_generated_dep_service_update_lb_edge_extension_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "networkservices_v1_generated_DepService_UpdateLbEdgeExtension_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "networkservices_v1_generated_dep_service_update_lb_edge_extension_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py b/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py index cc07bbc3b13f..d8c4a3558f14 100644 --- a/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py +++ b/packages/google-cloud-network-services/scripts/fixup_network_services_v1_keywords.py @@ -44,6 +44,7 @@ class network_servicesCallTransformer(cst.CSTTransformer): 'create_gateway': ('parent', 'gateway_id', 'gateway', ), 'create_grpc_route': ('parent', 'grpc_route_id', 'grpc_route', ), 'create_http_route': ('parent', 'http_route_id', 'http_route', ), + 'create_lb_edge_extension': ('parent', 'lb_edge_extension_id', 'lb_edge_extension', 'request_id', ), 'create_lb_route_extension': ('parent', 'lb_route_extension_id', 'lb_route_extension', 'request_id', ), 'create_lb_traffic_extension': ('parent', 'lb_traffic_extension_id', 'lb_traffic_extension', 'request_id', ), 'create_mesh': ('parent', 'mesh_id', 'mesh', ), @@ -58,6 +59,7 @@ class network_servicesCallTransformer(cst.CSTTransformer): 'delete_gateway': ('name', ), 'delete_grpc_route': ('name', ), 'delete_http_route': ('name', ), + 'delete_lb_edge_extension': ('name', 'request_id', ), 'delete_lb_route_extension': ('name', 'request_id', ), 'delete_lb_traffic_extension': ('name', 'request_id', ), 'delete_mesh': ('name', ), @@ -73,6 +75,7 @@ class network_servicesCallTransformer(cst.CSTTransformer): 'get_gateway_route_view': ('name', ), 'get_grpc_route': ('name', ), 'get_http_route': ('name', ), + 'get_lb_edge_extension': ('name', ), 'get_lb_route_extension': ('name', ), 'get_lb_traffic_extension': ('name', ), 'get_mesh': ('name', ), @@ -89,6 +92,7 @@ class network_servicesCallTransformer(cst.CSTTransformer): 'list_gateways': ('parent', 'page_size', 'page_token', ), 'list_grpc_routes': ('parent', 'page_size', 'page_token', 'return_partial_success', ), 'list_http_routes': ('parent', 'page_size', 'page_token', 'return_partial_success', ), + 'list_lb_edge_extensions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_lb_route_extensions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_lb_traffic_extensions': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_meshes': ('parent', 'page_size', 'page_token', 'return_partial_success', ), @@ -104,6 +108,7 @@ class 
network_servicesCallTransformer(cst.CSTTransformer): 'update_gateway': ('gateway', 'update_mask', ), 'update_grpc_route': ('grpc_route', 'update_mask', ), 'update_http_route': ('http_route', 'update_mask', ), + 'update_lb_edge_extension': ('lb_edge_extension', 'update_mask', 'request_id', ), 'update_lb_route_extension': ('lb_route_extension', 'update_mask', 'request_id', ), 'update_lb_traffic_extension': ('lb_traffic_extension', 'update_mask', 'request_id', ), 'update_mesh': ('mesh', 'update_mask', ), diff --git a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py index 4a21ee5d51e3..5cd34aa7c134 100644 --- a/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py +++ b/packages/google-cloud-network-services/tests/unit/gapic/network_services_v1/test_dep_service.py @@ -5043,11 +5043,11 @@ async def test_delete_lb_route_extension_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - dep.ListAuthzExtensionsRequest, + dep.ListLbEdgeExtensionsRequest, dict, ], ) -def test_list_authz_extensions(request_type, transport: str = "grpc"): +def test_list_lb_edge_extensions(request_type, transport: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5059,28 +5059,28 @@ def test_list_authz_extensions(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = dep.ListAuthzExtensionsResponse( + call.return_value = dep.ListLbEdgeExtensionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_authz_extensions(request) + response = client.list_lb_edge_extensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = dep.ListAuthzExtensionsRequest() + request = dep.ListLbEdgeExtensionsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAuthzExtensionsPager) + assert isinstance(response, pagers.ListLbEdgeExtensionsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_authz_extensions_non_empty_request_with_auto_populated_field(): +def test_list_lb_edge_extensions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DepServiceClient( @@ -5091,7 +5091,7 @@ def test_list_authz_extensions_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = dep.ListAuthzExtensionsRequest( + request = dep.ListLbEdgeExtensionsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -5100,15 +5100,15 @@ def test_list_authz_extensions_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_authz_extensions(request=request) + client.list_lb_edge_extensions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dep.ListAuthzExtensionsRequest( + assert args[0] == dep.ListLbEdgeExtensionsRequest( parent="parent_value", page_token="page_token_value", filter="filter_value", @@ -5116,7 +5116,7 @@ def test_list_authz_extensions_non_empty_request_with_auto_populated_field(): ) -def test_list_authz_extensions_use_cached_wrapped_rpc(): +def test_list_lb_edge_extensions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5131,7 +5131,7 @@ def test_list_authz_extensions_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_authz_extensions + client._transport.list_lb_edge_extensions in client._transport._wrapped_methods ) @@ -5141,15 +5141,15 @@ def test_list_authz_extensions_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_authz_extensions + client._transport.list_lb_edge_extensions ] = mock_rpc request = {} - client.list_authz_extensions(request) + client.list_lb_edge_extensions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_authz_extensions(request) + client.list_lb_edge_extensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5157,7 +5157,7 @@ def test_list_authz_extensions_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_authz_extensions_async_use_cached_wrapped_rpc( +async def test_list_lb_edge_extensions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5174,7 +5174,7 @@ async def test_list_authz_extensions_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_authz_extensions + client._client._transport.list_lb_edge_extensions in client._client._transport._wrapped_methods ) @@ -5182,16 +5182,16 @@ async def test_list_authz_extensions_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_authz_extensions + client._client._transport.list_lb_edge_extensions ] = mock_rpc request = {} - await client.list_authz_extensions(request) + await client.list_lb_edge_extensions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_authz_extensions(request) + await client.list_lb_edge_extensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5199,8 +5199,8 @@ async def test_list_authz_extensions_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_authz_extensions_async( - transport: str = "grpc_asyncio", request_type=dep.ListAuthzExtensionsRequest +async def test_list_lb_edge_extensions_async( + transport: str = "grpc_asyncio", request_type=dep.ListLbEdgeExtensionsRequest ): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -5213,51 +5213,51 @@ async def test_list_authz_extensions_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.ListAuthzExtensionsResponse( + dep.ListLbEdgeExtensionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_authz_extensions(request) + response = await client.list_lb_edge_extensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = dep.ListAuthzExtensionsRequest() + request = dep.ListLbEdgeExtensionsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAuthzExtensionsAsyncPager) + assert isinstance(response, pagers.ListLbEdgeExtensionsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_authz_extensions_async_from_dict(): - await test_list_authz_extensions_async(request_type=dict) +async def test_list_lb_edge_extensions_async_from_dict(): + await test_list_lb_edge_extensions_async(request_type=dict) -def test_list_authz_extensions_field_headers(): +def test_list_lb_edge_extensions_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.ListAuthzExtensionsRequest() + request = dep.ListLbEdgeExtensionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: - call.return_value = dep.ListAuthzExtensionsResponse() - client.list_authz_extensions(request) + call.return_value = dep.ListLbEdgeExtensionsResponse() + client.list_lb_edge_extensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5273,25 +5273,25 @@ def test_list_authz_extensions_field_headers(): @pytest.mark.asyncio -async def test_list_authz_extensions_field_headers_async(): +async def test_list_lb_edge_extensions_field_headers_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = dep.ListAuthzExtensionsRequest() + request = dep.ListLbEdgeExtensionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.ListAuthzExtensionsResponse() + dep.ListLbEdgeExtensionsResponse() ) - await client.list_authz_extensions(request) + await client.list_lb_edge_extensions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5306,20 +5306,20 @@ async def test_list_authz_extensions_field_headers_async(): ) in kw["metadata"] -def test_list_authz_extensions_flattened(): +def test_list_lb_edge_extensions_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = dep.ListAuthzExtensionsResponse() + call.return_value = dep.ListLbEdgeExtensionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_authz_extensions( + client.list_lb_edge_extensions( parent="parent_value", ) @@ -5332,7 +5332,7 @@ def test_list_authz_extensions_flattened(): assert arg == mock_val -def test_list_authz_extensions_flattened_error(): +def test_list_lb_edge_extensions_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5340,31 +5340,31 @@ def test_list_authz_extensions_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_authz_extensions( - dep.ListAuthzExtensionsRequest(), + client.list_lb_edge_extensions( + dep.ListLbEdgeExtensionsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_authz_extensions_flattened_async(): +async def test_list_lb_edge_extensions_flattened_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = dep.ListAuthzExtensionsResponse() + call.return_value = dep.ListLbEdgeExtensionsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.ListAuthzExtensionsResponse() + dep.ListLbEdgeExtensionsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_authz_extensions( + response = await client.list_lb_edge_extensions( parent="parent_value", ) @@ -5378,7 +5378,7 @@ async def test_list_authz_extensions_flattened_async(): @pytest.mark.asyncio -async def test_list_authz_extensions_flattened_error_async(): +async def test_list_lb_edge_extensions_flattened_error_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5386,13 +5386,13 @@ async def test_list_authz_extensions_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_authz_extensions( - dep.ListAuthzExtensionsRequest(), + await client.list_lb_edge_extensions( + dep.ListLbEdgeExtensionsRequest(), parent="parent_value", ) -def test_list_authz_extensions_pager(transport_name: str = "grpc"): +def test_list_lb_edge_extensions_pager(transport_name: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -5400,32 +5400,32 @@ def test_list_authz_extensions_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], next_page_token="abc", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[], + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[], next_page_token="def", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), ], next_page_token="ghi", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], ), RuntimeError, @@ -5437,7 +5437,7 @@ def test_list_authz_extensions_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_authz_extensions(request={}, retry=retry, timeout=timeout) + pager = client.list_lb_edge_extensions(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata assert pager._retry == retry @@ -5445,10 +5445,10 @@ def test_list_authz_extensions_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all(isinstance(i, dep.AuthzExtension) for i in results) + assert all(isinstance(i, dep.LbEdgeExtension) for i in results) -def test_list_authz_extensions_pages(transport_name: str = "grpc"): +def test_list_lb_edge_extensions_pages(transport_name: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -5456,82 +5456,82 @@ def test_list_authz_extensions_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" + type(client.transport.list_lb_edge_extensions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], next_page_token="abc", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[], + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[], next_page_token="def", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), ], next_page_token="ghi", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], ), RuntimeError, ) - pages = list(client.list_authz_extensions(request={}).pages) + pages = list(client.list_lb_edge_extensions(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_authz_extensions_async_pager(): +async def test_list_lb_edge_extensions_async_pager(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_authz_extensions), + type(client.transport.list_lb_edge_extensions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], next_page_token="abc", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[], + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[], next_page_token="def", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), ], next_page_token="ghi", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], ), RuntimeError, ) - async_pager = await client.list_authz_extensions( + async_pager = await client.list_lb_edge_extensions( request={}, ) assert async_pager.next_page_token == "abc" @@ -5540,45 +5540,45 @@ async def test_list_authz_extensions_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, dep.AuthzExtension) for i in responses) + assert all(isinstance(i, dep.LbEdgeExtension) for i in responses) @pytest.mark.asyncio -async def test_list_authz_extensions_async_pages(): +async def test_list_lb_edge_extensions_async_pages(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_authz_extensions), + type(client.transport.list_lb_edge_extensions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], next_page_token="abc", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[], + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[], next_page_token="def", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), ], next_page_token="ghi", ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), ], ), RuntimeError, @@ -5587,7 +5587,7 @@ async def test_list_authz_extensions_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_authz_extensions(request={}) + await client.list_lb_edge_extensions(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -5597,11 +5597,11 @@ async def test_list_authz_extensions_async_pages(): @pytest.mark.parametrize( "request_type", [ - dep.GetAuthzExtensionRequest, + dep.GetLbEdgeExtensionRequest, dict, ], ) -def test_get_authz_extension(request_type, transport: str = "grpc"): +def test_get_lb_edge_extension(request_type, transport: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5613,40 +5613,32 @@ def test_get_authz_extension(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" + type(client.transport.get_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = dep.AuthzExtension( + call.return_value = dep.LbEdgeExtension( name="name_value", description="description_value", + forwarding_rules=["forwarding_rules_value"], load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, - authority="authority_value", - service="service_value", - fail_open=True, - forward_headers=["forward_headers_value"], - wire_format=dep.WireFormat.EXT_PROC_GRPC, ) - response = client.get_authz_extension(request) + response = client.get_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = dep.GetAuthzExtensionRequest() + request = dep.GetLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, dep.AuthzExtension) + assert isinstance(response, dep.LbEdgeExtension) assert response.name == "name_value" assert response.description == "description_value" + assert response.forwarding_rules == ["forwarding_rules_value"] assert response.load_balancing_scheme == dep.LoadBalancingScheme.INTERNAL_MANAGED - assert response.authority == "authority_value" - assert response.service == "service_value" - assert response.fail_open is True - assert response.forward_headers == ["forward_headers_value"] - assert response.wire_format == dep.WireFormat.EXT_PROC_GRPC -def test_get_authz_extension_non_empty_request_with_auto_populated_field(): +def test_get_lb_edge_extension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DepServiceClient( @@ -5657,26 +5649,26 @@ def test_get_authz_extension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = dep.GetAuthzExtensionRequest( + request = dep.GetLbEdgeExtensionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" + type(client.transport.get_lb_edge_extension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_authz_extension(request=request) + client.get_lb_edge_extension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dep.GetAuthzExtensionRequest( + assert args[0] == dep.GetLbEdgeExtensionRequest( name="name_value", ) -def test_get_authz_extension_use_cached_wrapped_rpc(): +def test_get_lb_edge_extension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5691,7 +5683,8 @@ def test_get_authz_extension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_authz_extension in client._transport._wrapped_methods + client._transport.get_lb_edge_extension + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -5700,15 +5693,15 @@ def test_get_authz_extension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_authz_extension + client._transport.get_lb_edge_extension ] = mock_rpc request = {} - client.get_authz_extension(request) + client.get_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_authz_extension(request) + client.get_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5716,7 +5709,7 @@ def test_get_authz_extension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_authz_extension_async_use_cached_wrapped_rpc( +async def test_get_lb_edge_extension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5733,7 +5726,7 @@ async def test_get_authz_extension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_authz_extension + client._client._transport.get_lb_edge_extension in client._client._transport._wrapped_methods ) @@ -5741,16 +5734,16 @@ async def test_get_authz_extension_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_authz_extension + client._client._transport.get_lb_edge_extension ] = mock_rpc request = {} - await client.get_authz_extension(request) + await client.get_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_authz_extension(request) + await client.get_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5758,8 +5751,8 @@ async def test_get_authz_extension_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_authz_extension_async( - transport: str = "grpc_asyncio", request_type=dep.GetAuthzExtensionRequest +async def test_get_lb_edge_extension_async( + transport: str = "grpc_asyncio", request_type=dep.GetLbEdgeExtensionRequest ): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -5772,63 +5765,55 @@ async def test_get_authz_extension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" + type(client.transport.get_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.AuthzExtension( + dep.LbEdgeExtension( name="name_value", description="description_value", + forwarding_rules=["forwarding_rules_value"], load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, - authority="authority_value", - service="service_value", - fail_open=True, - forward_headers=["forward_headers_value"], - wire_format=dep.WireFormat.EXT_PROC_GRPC, ) ) - response = await client.get_authz_extension(request) + response = await client.get_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = dep.GetAuthzExtensionRequest() + request = dep.GetLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, dep.AuthzExtension) + assert isinstance(response, dep.LbEdgeExtension) assert response.name == "name_value" assert response.description == "description_value" + assert response.forwarding_rules == ["forwarding_rules_value"] assert response.load_balancing_scheme == dep.LoadBalancingScheme.INTERNAL_MANAGED - assert response.authority == "authority_value" - assert response.service == "service_value" - assert response.fail_open is True - assert response.forward_headers == ["forward_headers_value"] - assert response.wire_format == dep.WireFormat.EXT_PROC_GRPC @pytest.mark.asyncio -async def test_get_authz_extension_async_from_dict(): - await test_get_authz_extension_async(request_type=dict) +async def test_get_lb_edge_extension_async_from_dict(): + await test_get_lb_edge_extension_async(request_type=dict) -def test_get_authz_extension_field_headers(): +def test_get_lb_edge_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.GetAuthzExtensionRequest() + request = dep.GetLbEdgeExtensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" + type(client.transport.get_lb_edge_extension), "__call__" ) as call: - call.return_value = dep.AuthzExtension() - client.get_authz_extension(request) + call.return_value = dep.LbEdgeExtension() + client.get_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5844,23 +5829,23 @@ def test_get_authz_extension_field_headers(): @pytest.mark.asyncio -async def test_get_authz_extension_field_headers_async(): +async def test_get_lb_edge_extension_field_headers_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.GetAuthzExtensionRequest() + request = dep.GetLbEdgeExtensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" + type(client.transport.get_lb_edge_extension), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dep.AuthzExtension()) - await client.get_authz_extension(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dep.LbEdgeExtension()) + await client.get_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5875,20 +5860,20 @@ async def test_get_authz_extension_field_headers_async(): ) in kw["metadata"] -def test_get_authz_extension_flattened(): +def test_get_lb_edge_extension_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" + type(client.transport.get_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = dep.AuthzExtension() + call.return_value = dep.LbEdgeExtension() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_authz_extension( + client.get_lb_edge_extension( name="name_value", ) @@ -5901,7 +5886,7 @@ def test_get_authz_extension_flattened(): assert arg == mock_val -def test_get_authz_extension_flattened_error(): +def test_get_lb_edge_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5909,29 +5894,29 @@ def test_get_authz_extension_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_authz_extension( - dep.GetAuthzExtensionRequest(), + client.get_lb_edge_extension( + dep.GetLbEdgeExtensionRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_authz_extension_flattened_async(): +async def test_get_lb_edge_extension_flattened_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" + type(client.transport.get_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = dep.AuthzExtension() + call.return_value = dep.LbEdgeExtension() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dep.AuthzExtension()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dep.LbEdgeExtension()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_authz_extension( + response = await client.get_lb_edge_extension( name="name_value", ) @@ -5945,7 +5930,7 @@ async def test_get_authz_extension_flattened_async(): @pytest.mark.asyncio -async def test_get_authz_extension_flattened_error_async(): +async def test_get_lb_edge_extension_flattened_error_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5953,8 +5938,8 @@ async def test_get_authz_extension_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_authz_extension( - dep.GetAuthzExtensionRequest(), + await client.get_lb_edge_extension( + dep.GetLbEdgeExtensionRequest(), name="name_value", ) @@ -5962,11 +5947,11 @@ async def test_get_authz_extension_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - dep.CreateAuthzExtensionRequest, + dep.CreateLbEdgeExtensionRequest, dict, ], ) -def test_create_authz_extension(request_type, transport: str = "grpc"): +def test_create_lb_edge_extension(request_type, transport: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5978,23 +5963,23 @@ def test_create_authz_extension(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" + type(client.transport.create_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_authz_extension(request) + response = client.create_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = dep.CreateAuthzExtensionRequest() + request = dep.CreateLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_authz_extension_non_empty_request_with_auto_populated_field(): +def test_create_lb_edge_extension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = DepServiceClient( @@ -6005,28 +5990,28 @@ def test_create_authz_extension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = dep.CreateAuthzExtensionRequest( + request = dep.CreateLbEdgeExtensionRequest( parent="parent_value", - authz_extension_id="authz_extension_id_value", + lb_edge_extension_id="lb_edge_extension_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" + type(client.transport.create_lb_edge_extension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.create_authz_extension(request=request) + client.create_lb_edge_extension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dep.CreateAuthzExtensionRequest( + assert args[0] == dep.CreateLbEdgeExtensionRequest( parent="parent_value", - authz_extension_id="authz_extension_id_value", + lb_edge_extension_id="lb_edge_extension_id_value", ) -def test_create_authz_extension_use_cached_wrapped_rpc(): +def test_create_lb_edge_extension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6041,7 +6026,7 @@ def test_create_authz_extension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_authz_extension + client._transport.create_lb_edge_extension in client._transport._wrapped_methods ) @@ -6051,10 +6036,10 @@ def test_create_authz_extension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_authz_extension + client._transport.create_lb_edge_extension ] = mock_rpc request = {} - client.create_authz_extension(request) + client.create_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -6064,7 +6049,7 @@ def test_create_authz_extension_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_authz_extension(request) + client.create_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6072,7 +6057,7 @@ def test_create_authz_extension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_authz_extension_async_use_cached_wrapped_rpc( +async def test_create_lb_edge_extension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6089,7 +6074,7 @@ async def test_create_authz_extension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_authz_extension + client._client._transport.create_lb_edge_extension in client._client._transport._wrapped_methods ) @@ -6097,11 +6082,11 @@ async def test_create_authz_extension_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_authz_extension + client._client._transport.create_lb_edge_extension ] = mock_rpc request = {} - await client.create_authz_extension(request) + await client.create_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -6111,7 +6096,7 @@ async def test_create_authz_extension_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_authz_extension(request) + await client.create_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6119,8 +6104,8 @@ async def test_create_authz_extension_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_authz_extension_async( - transport: str = "grpc_asyncio", request_type=dep.CreateAuthzExtensionRequest +async def test_create_lb_edge_extension_async( + transport: str = "grpc_asyncio", request_type=dep.CreateLbEdgeExtensionRequest ): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -6133,18 +6118,18 @@ async def test_create_authz_extension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" + type(client.transport.create_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_authz_extension(request) + response = await client.create_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = dep.CreateAuthzExtensionRequest() + request = dep.CreateLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -6152,27 +6137,27 @@ async def test_create_authz_extension_async( @pytest.mark.asyncio -async def test_create_authz_extension_async_from_dict(): - await test_create_authz_extension_async(request_type=dict) +async def test_create_lb_edge_extension_async_from_dict(): + await test_create_lb_edge_extension_async(request_type=dict) -def test_create_authz_extension_field_headers(): +def test_create_lb_edge_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.CreateAuthzExtensionRequest() + request = dep.CreateLbEdgeExtensionRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" + type(client.transport.create_lb_edge_extension), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_authz_extension(request) + client.create_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6188,25 +6173,25 @@ def test_create_authz_extension_field_headers(): @pytest.mark.asyncio -async def test_create_authz_extension_field_headers_async(): +async def test_create_lb_edge_extension_field_headers_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.CreateAuthzExtensionRequest() + request = dep.CreateLbEdgeExtensionRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" + type(client.transport.create_lb_edge_extension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_authz_extension(request) + await client.create_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6221,23 +6206,23 @@ async def test_create_authz_extension_field_headers_async(): ) in kw["metadata"] -def test_create_authz_extension_flattened(): +def test_create_lb_edge_extension_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" + type(client.transport.create_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.create_authz_extension( + client.create_lb_edge_extension( parent="parent_value", - authz_extension=dep.AuthzExtension(name="name_value"), - authz_extension_id="authz_extension_id_value", + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + lb_edge_extension_id="lb_edge_extension_id_value", ) # Establish that the underlying call was made with the expected @@ -6247,15 +6232,15 @@ def test_create_authz_extension_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].authz_extension - mock_val = dep.AuthzExtension(name="name_value") + arg = args[0].lb_edge_extension + mock_val = dep.LbEdgeExtension(name="name_value") assert arg == mock_val - arg = args[0].authz_extension_id - mock_val = "authz_extension_id_value" + arg = args[0].lb_edge_extension_id + mock_val = "lb_edge_extension_id_value" assert arg == mock_val -def test_create_authz_extension_flattened_error(): +def test_create_lb_edge_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6263,23 +6248,23 @@ def test_create_authz_extension_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_authz_extension( - dep.CreateAuthzExtensionRequest(), + client.create_lb_edge_extension( + dep.CreateLbEdgeExtensionRequest(), parent="parent_value", - authz_extension=dep.AuthzExtension(name="name_value"), - authz_extension_id="authz_extension_id_value", + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + lb_edge_extension_id="lb_edge_extension_id_value", ) @pytest.mark.asyncio -async def test_create_authz_extension_flattened_async(): +async def test_create_lb_edge_extension_flattened_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" + type(client.transport.create_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -6289,10 +6274,10 @@ async def test_create_authz_extension_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_authz_extension( + response = await client.create_lb_edge_extension( parent="parent_value", - authz_extension=dep.AuthzExtension(name="name_value"), - authz_extension_id="authz_extension_id_value", + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + lb_edge_extension_id="lb_edge_extension_id_value", ) # Establish that the underlying call was made with the expected @@ -6302,16 +6287,16 @@ async def test_create_authz_extension_flattened_async(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].authz_extension - mock_val = dep.AuthzExtension(name="name_value") + arg = args[0].lb_edge_extension + mock_val = dep.LbEdgeExtension(name="name_value") assert arg == mock_val - arg = args[0].authz_extension_id - mock_val = "authz_extension_id_value" + arg = args[0].lb_edge_extension_id + mock_val = "lb_edge_extension_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_authz_extension_flattened_error_async(): +async def test_create_lb_edge_extension_flattened_error_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -6319,22 +6304,22 @@ async def test_create_authz_extension_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_authz_extension( - dep.CreateAuthzExtensionRequest(), + await client.create_lb_edge_extension( + dep.CreateLbEdgeExtensionRequest(), parent="parent_value", - authz_extension=dep.AuthzExtension(name="name_value"), - authz_extension_id="authz_extension_id_value", + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + lb_edge_extension_id="lb_edge_extension_id_value", ) @pytest.mark.parametrize( "request_type", [ - dep.UpdateAuthzExtensionRequest, + dep.UpdateLbEdgeExtensionRequest, dict, ], ) -def test_update_authz_extension(request_type, transport: str = "grpc"): +def test_update_lb_edge_extension(request_type, transport: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6346,23 +6331,23 @@ def test_update_authz_extension(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" + type(client.transport.update_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.update_authz_extension(request) + response = client.update_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = dep.UpdateAuthzExtensionRequest() + request = dep.UpdateLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_update_authz_extension_non_empty_request_with_auto_populated_field(): +def test_update_lb_edge_extension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = DepServiceClient( @@ -6373,22 +6358,22 @@ def test_update_authz_extension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = dep.UpdateAuthzExtensionRequest() + request = dep.UpdateLbEdgeExtensionRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" + type(client.transport.update_lb_edge_extension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.update_authz_extension(request=request) + client.update_lb_edge_extension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dep.UpdateAuthzExtensionRequest() + assert args[0] == dep.UpdateLbEdgeExtensionRequest() -def test_update_authz_extension_use_cached_wrapped_rpc(): +def test_update_lb_edge_extension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6403,7 +6388,7 @@ def test_update_authz_extension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_authz_extension + client._transport.update_lb_edge_extension in client._transport._wrapped_methods ) @@ -6413,10 +6398,10 @@ def test_update_authz_extension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_authz_extension + client._transport.update_lb_edge_extension ] = mock_rpc request = {} - client.update_authz_extension(request) + client.update_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -6426,7 +6411,7 @@ def test_update_authz_extension_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_authz_extension(request) + client.update_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6434,7 +6419,7 @@ def test_update_authz_extension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_update_authz_extension_async_use_cached_wrapped_rpc( +async def test_update_lb_edge_extension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6451,7 +6436,7 @@ async def test_update_authz_extension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.update_authz_extension + client._client._transport.update_lb_edge_extension in client._client._transport._wrapped_methods ) @@ -6459,11 +6444,11 @@ async def test_update_authz_extension_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.update_authz_extension + client._client._transport.update_lb_edge_extension ] = mock_rpc request = {} - await client.update_authz_extension(request) + await client.update_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -6473,7 +6458,7 @@ async def test_update_authz_extension_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.update_authz_extension(request) + await client.update_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6481,8 +6466,8 @@ async def test_update_authz_extension_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_update_authz_extension_async( - transport: str = "grpc_asyncio", request_type=dep.UpdateAuthzExtensionRequest +async def test_update_lb_edge_extension_async( + transport: str = "grpc_asyncio", request_type=dep.UpdateLbEdgeExtensionRequest ): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -6495,18 +6480,18 @@ async def test_update_authz_extension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" + type(client.transport.update_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.update_authz_extension(request) + response = await client.update_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = dep.UpdateAuthzExtensionRequest() + request = dep.UpdateLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -6514,27 +6499,27 @@ async def test_update_authz_extension_async( @pytest.mark.asyncio -async def test_update_authz_extension_async_from_dict(): - await test_update_authz_extension_async(request_type=dict) +async def test_update_lb_edge_extension_async_from_dict(): + await test_update_lb_edge_extension_async(request_type=dict) -def test_update_authz_extension_field_headers(): +def test_update_lb_edge_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.UpdateAuthzExtensionRequest() + request = dep.UpdateLbEdgeExtensionRequest() - request.authz_extension.name = "name_value" + request.lb_edge_extension.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" + type(client.transport.update_lb_edge_extension), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.update_authz_extension(request) + client.update_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6545,30 +6530,30 @@ def test_update_authz_extension_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "authz_extension.name=name_value", + "lb_edge_extension.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_update_authz_extension_field_headers_async(): +async def test_update_lb_edge_extension_field_headers_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = dep.UpdateAuthzExtensionRequest() + request = dep.UpdateLbEdgeExtensionRequest() - request.authz_extension.name = "name_value" + request.lb_edge_extension.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" + type(client.transport.update_lb_edge_extension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.update_authz_extension(request) + await client.update_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6579,25 +6564,25 @@ async def test_update_authz_extension_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "authz_extension.name=name_value", + "lb_edge_extension.name=name_value", ) in kw["metadata"] -def test_update_authz_extension_flattened(): +def test_update_lb_edge_extension_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" + type(client.transport.update_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_authz_extension( - authz_extension=dep.AuthzExtension(name="name_value"), + client.update_lb_edge_extension( + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -6605,15 +6590,15 @@ def test_update_authz_extension_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].authz_extension - mock_val = dep.AuthzExtension(name="name_value") + arg = args[0].lb_edge_extension + mock_val = dep.LbEdgeExtension(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_update_authz_extension_flattened_error(): +def test_update_lb_edge_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6621,22 +6606,22 @@ def test_update_authz_extension_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_authz_extension( - dep.UpdateAuthzExtensionRequest(), - authz_extension=dep.AuthzExtension(name="name_value"), + client.update_lb_edge_extension( + dep.UpdateLbEdgeExtensionRequest(), + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_update_authz_extension_flattened_async(): +async def test_update_lb_edge_extension_flattened_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" + type(client.transport.update_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") @@ -6646,8 +6631,8 @@ async def test_update_authz_extension_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_authz_extension( - authz_extension=dep.AuthzExtension(name="name_value"), + response = await client.update_lb_edge_extension( + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -6655,8 +6640,8 @@ async def test_update_authz_extension_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].authz_extension - mock_val = dep.AuthzExtension(name="name_value") + arg = args[0].lb_edge_extension + mock_val = dep.LbEdgeExtension(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) @@ -6664,7 +6649,7 @@ async def test_update_authz_extension_flattened_async(): @pytest.mark.asyncio -async def test_update_authz_extension_flattened_error_async(): +async def test_update_lb_edge_extension_flattened_error_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -6672,9 +6657,9 @@ async def test_update_authz_extension_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.update_authz_extension( - dep.UpdateAuthzExtensionRequest(), - authz_extension=dep.AuthzExtension(name="name_value"), + await client.update_lb_edge_extension( + dep.UpdateLbEdgeExtensionRequest(), + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -6682,11 +6667,11 @@ async def test_update_authz_extension_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - dep.DeleteAuthzExtensionRequest, + dep.DeleteLbEdgeExtensionRequest, dict, ], ) -def test_delete_authz_extension(request_type, transport: str = "grpc"): +def test_delete_lb_edge_extension(request_type, transport: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6698,23 +6683,23 @@ def test_delete_authz_extension(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" + type(client.transport.delete_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_authz_extension(request) + response = client.delete_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = dep.DeleteAuthzExtensionRequest() + request = dep.DeleteLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_authz_extension_non_empty_request_with_auto_populated_field(): +def test_delete_lb_edge_extension_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = DepServiceClient( @@ -6725,26 +6710,26 @@ def test_delete_authz_extension_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = dep.DeleteAuthzExtensionRequest( + request = dep.DeleteLbEdgeExtensionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" + type(client.transport.delete_lb_edge_extension), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.delete_authz_extension(request=request) + client.delete_lb_edge_extension(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == dep.DeleteAuthzExtensionRequest( + assert args[0] == dep.DeleteLbEdgeExtensionRequest( name="name_value", ) -def test_delete_authz_extension_use_cached_wrapped_rpc(): +def test_delete_lb_edge_extension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6759,7 +6744,7 @@ def test_delete_authz_extension_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_authz_extension + client._transport.delete_lb_edge_extension in client._transport._wrapped_methods ) @@ -6769,10 +6754,10 @@ def test_delete_authz_extension_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_authz_extension + client._transport.delete_lb_edge_extension ] = mock_rpc request = {} - client.delete_authz_extension(request) + client.delete_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -6782,7 +6767,7 @@ def test_delete_authz_extension_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_authz_extension(request) + client.delete_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6790,7 +6775,7 @@ def test_delete_authz_extension_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_authz_extension_async_use_cached_wrapped_rpc( +async def test_delete_lb_edge_extension_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6807,7 +6792,7 @@ async def test_delete_authz_extension_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_authz_extension + client._client._transport.delete_lb_edge_extension in client._client._transport._wrapped_methods ) @@ -6815,11 +6800,11 @@ async def test_delete_authz_extension_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_authz_extension + client._client._transport.delete_lb_edge_extension ] = mock_rpc request = {} - await client.delete_authz_extension(request) + await client.delete_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -6829,7 +6814,7 @@ async def test_delete_authz_extension_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_authz_extension(request) + await client.delete_lb_edge_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6837,8 +6822,8 @@ async def test_delete_authz_extension_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_authz_extension_async( - transport: str = "grpc_asyncio", request_type=dep.DeleteAuthzExtensionRequest +async def test_delete_lb_edge_extension_async( + transport: str = "grpc_asyncio", request_type=dep.DeleteLbEdgeExtensionRequest ): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), @@ -6851,18 +6836,18 @@ async def test_delete_authz_extension_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" + type(client.transport.delete_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_authz_extension(request) + response = await client.delete_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = dep.DeleteAuthzExtensionRequest() + request = dep.DeleteLbEdgeExtensionRequest() assert args[0] == request # Establish that the response is the type that we expect. @@ -6870,27 +6855,27 @@ async def test_delete_authz_extension_async( @pytest.mark.asyncio -async def test_delete_authz_extension_async_from_dict(): - await test_delete_authz_extension_async(request_type=dict) +async def test_delete_lb_edge_extension_async_from_dict(): + await test_delete_lb_edge_extension_async(request_type=dict) -def test_delete_authz_extension_field_headers(): +def test_delete_lb_edge_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.DeleteAuthzExtensionRequest() + request = dep.DeleteLbEdgeExtensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" + type(client.transport.delete_lb_edge_extension), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_authz_extension(request) + client.delete_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6906,25 +6891,25 @@ def test_delete_authz_extension_field_headers(): @pytest.mark.asyncio -async def test_delete_authz_extension_field_headers_async(): +async def test_delete_lb_edge_extension_field_headers_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = dep.DeleteAuthzExtensionRequest() + request = dep.DeleteLbEdgeExtensionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" + type(client.transport.delete_lb_edge_extension), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_authz_extension(request) + await client.delete_lb_edge_extension(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6939,20 +6924,20 @@ async def test_delete_authz_extension_field_headers_async(): ) in kw["metadata"] -def test_delete_authz_extension_flattened(): +def test_delete_lb_edge_extension_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" + type(client.transport.delete_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_authz_extension( + client.delete_lb_edge_extension( name="name_value", ) @@ -6965,7 +6950,7 @@ def test_delete_authz_extension_flattened(): assert arg == mock_val -def test_delete_authz_extension_flattened_error(): +def test_delete_lb_edge_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -6973,21 +6958,21 @@ def test_delete_authz_extension_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_authz_extension( - dep.DeleteAuthzExtensionRequest(), + client.delete_lb_edge_extension( + dep.DeleteLbEdgeExtensionRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_delete_authz_extension_flattened_async(): +async def test_delete_lb_edge_extension_flattened_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" + type(client.transport.delete_lb_edge_extension), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -6997,7 +6982,7 @@ async def test_delete_authz_extension_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_authz_extension( + response = await client.delete_lb_edge_extension( name="name_value", ) @@ -7011,7 +6996,7 @@ async def test_delete_authz_extension_flattened_async(): @pytest.mark.asyncio -async def test_delete_authz_extension_flattened_error_async(): +async def test_delete_lb_edge_extension_flattened_error_async(): client = DepServiceAsyncClient( credentials=async_anonymous_credentials(), ) @@ -7019,19 +7004,95 @@ async def test_delete_authz_extension_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_authz_extension( - dep.DeleteAuthzExtensionRequest(), + await client.delete_lb_edge_extension( + dep.DeleteLbEdgeExtensionRequest(), name="name_value", ) -def test_list_lb_traffic_extensions_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + dep.ListAuthzExtensionsRequest, + dict, + ], +) +def test_list_authz_extensions(request_type, transport: str = "grpc"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dep.ListAuthzExtensionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_authz_extensions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dep.ListAuthzExtensionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAuthzExtensionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_authz_extensions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dep.ListAuthzExtensionsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_authz_extensions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dep.ListAuthzExtensionsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_authz_extensions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -7040,7 +7101,7 @@ def test_list_lb_traffic_extensions_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_lb_traffic_extensions + client._transport.list_authz_extensions in client._transport._wrapped_methods ) @@ -7050,438 +7111,548 @@ def test_list_lb_traffic_extensions_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_lb_traffic_extensions + client._transport.list_authz_extensions ] = mock_rpc - request = {} - client.list_lb_traffic_extensions(request) + client.list_authz_extensions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_lb_traffic_extensions(request) + client.list_authz_extensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_lb_traffic_extensions_rest_required_fields( - request_type=dep.ListLbTrafficExtensionsRequest, +@pytest.mark.asyncio +async def test_list_authz_extensions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DepServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_authz_extensions + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_lb_traffic_extensions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_authz_extensions + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.list_authz_extensions(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_lb_traffic_extensions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) + await client.list_authz_extensions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_authz_extensions_async( + transport: str = "grpc_asyncio", request_type=dep.ListAuthzExtensionsRequest +): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.ListAuthzExtensionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_authz_extensions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dep.ListAuthzExtensionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAuthzExtensionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_authz_extensions_async_from_dict(): + await test_list_authz_extensions_async(request_type=dict) + + +def test_list_authz_extensions_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dep.ListLbTrafficExtensionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dep.ListAuthzExtensionsRequest() - # Convert return value to protobuf type - return_value = dep.ListLbTrafficExtensionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + call.return_value = dep.ListAuthzExtensionsResponse() + client.list_authz_extensions(request) - response = client.list_lb_traffic_extensions(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_list_lb_traffic_extensions_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_authz_extensions_field_headers_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.list_lb_traffic_extensions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.ListAuthzExtensionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.ListAuthzExtensionsResponse() ) - & set(("parent",)) - ) + await client.list_authz_extensions(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_list_lb_traffic_extensions_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_authz_extensions_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = dep.ListLbTrafficExtensionsResponse() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dep.ListAuthzExtensionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_authz_extensions( + parent="parent_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_list_authz_extensions_flattened_error(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_authz_extensions( + dep.ListAuthzExtensionsRequest(), parent="parent_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dep.ListLbTrafficExtensionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_lb_traffic_extensions(**mock_args) +@pytest.mark.asyncio +async def test_list_authz_extensions_flattened_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dep.ListAuthzExtensionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.ListAuthzExtensionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_authz_extensions( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/lbTrafficExtensions" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_list_lb_traffic_extensions_rest_flattened_error(transport: str = "rest"): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_list_authz_extensions_flattened_error_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_lb_traffic_extensions( - dep.ListLbTrafficExtensionsRequest(), + await client.list_authz_extensions( + dep.ListAuthzExtensionsRequest(), parent="parent_value", ) -def test_list_lb_traffic_extensions_rest_pager(transport: str = "rest"): +def test_list_authz_extensions_pager(transport_name: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dep.ListLbTrafficExtensionsResponse( - lb_traffic_extensions=[ - dep.LbTrafficExtension(), - dep.LbTrafficExtension(), - dep.LbTrafficExtension(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + dep.AuthzExtension(), ], next_page_token="abc", ), - dep.ListLbTrafficExtensionsResponse( - lb_traffic_extensions=[], + dep.ListAuthzExtensionsResponse( + authz_extensions=[], next_page_token="def", ), - dep.ListLbTrafficExtensionsResponse( - lb_traffic_extensions=[ - dep.LbTrafficExtension(), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), ], next_page_token="ghi", ), - dep.ListLbTrafficExtensionsResponse( - lb_traffic_extensions=[ - dep.LbTrafficExtension(), - dep.LbTrafficExtension(), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple( - dep.ListLbTrafficExtensionsResponse.to_json(x) for x in response + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} + pager = client.list_authz_extensions(request={}, retry=retry, timeout=timeout) - pager = client.list_lb_traffic_extensions(request=sample_request) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 - assert all(isinstance(i, dep.LbTrafficExtension) for i in results) + assert all(isinstance(i, dep.AuthzExtension) for i in results) - pages = list(client.list_lb_traffic_extensions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_list_authz_extensions_pages(transport_name: str = "grpc"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) -def test_get_lb_traffic_extension_rest_use_cached_wrapped_rpc(): - # Clients should use 
_prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + next_page_token="abc", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[], + next_page_token="def", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + ], + next_page_token="ghi", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + ), + RuntimeError, ) + pages = list(client.list_authz_extensions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.get_lb_traffic_extension - in client._transport._wrapped_methods - ) +@pytest.mark.asyncio +async def test_list_authz_extensions_async_pager(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + next_page_token="abc", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[], + next_page_token="def", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + ], + next_page_token="ghi", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + ), + RuntimeError, ) - client._transport._wrapped_methods[ - client._transport.get_lb_traffic_extension - ] = mock_rpc - - request = {} - client.get_lb_traffic_extension(request) + async_pager = await client.list_authz_extensions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + assert len(responses) == 6 + assert all(isinstance(i, dep.AuthzExtension) for i in responses) - client.get_lb_traffic_extension(request) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +@pytest.mark.asyncio +async def test_list_authz_extensions_async_pages(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_authz_extensions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + next_page_token="abc", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[], + next_page_token="def", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + ], + next_page_token="ghi", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_authz_extensions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_get_lb_traffic_extension_rest_required_fields( - request_type=dep.GetLbTrafficExtensionRequest, -): - transport_class = transports.DepServiceRestTransport - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.parametrize( + "request_type", + [ + dep.GetAuthzExtensionRequest, + dict, + ], +) +def test_get_authz_extension(request_type, transport: str = "grpc"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_lb_traffic_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dep.AuthzExtension( + name="name_value", + description="description_value", + load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, + authority="authority_value", + service="service_value", + fail_open=True, + forward_headers=["forward_headers_value"], + wire_format=dep.WireFormat.EXT_PROC_GRPC, + ) + response = client.get_authz_extension(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dep.GetAuthzExtensionRequest() + assert args[0] == request - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_lb_traffic_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dep.AuthzExtension) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.load_balancing_scheme == dep.LoadBalancingScheme.INTERNAL_MANAGED + assert response.authority == "authority_value" + assert response.service == "service_value" + assert response.fail_open is True + assert response.forward_headers == ["forward_headers_value"] + assert response.wire_format == dep.WireFormat.EXT_PROC_GRPC - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +def test_get_authz_extension_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = dep.LbTrafficExtension() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dep.LbTrafficExtension.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_lb_traffic_extension(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_lb_traffic_extension_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials + transport="grpc", ) - unset_fields = transport.get_lb_traffic_extension._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_lb_traffic_extension_rest_flattened(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dep.GetAuthzExtensionRequest( + name="name_value", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = dep.LbTrafficExtension() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dep.LbTrafficExtension.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_lb_traffic_extension(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/lbTrafficExtensions/*}" - % client.transport._host, - args[1], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - - -def test_get_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_lb_traffic_extension( - dep.GetLbTrafficExtensionRequest(), + client.get_authz_extension(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dep.GetAuthzExtensionRequest( name="name_value", ) -def test_create_lb_traffic_extension_rest_use_cached_wrapped_rpc(): +def test_get_authz_extension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -7490,8 +7661,7 @@ def test_create_lb_traffic_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_lb_traffic_extension - in client._transport._wrapped_methods + client._transport.get_authz_extension in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -7500,411 +7670,339 @@ def test_create_lb_traffic_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_lb_traffic_extension + client._transport.get_authz_extension ] = mock_rpc - request = {} - client.create_lb_traffic_extension(request) + client.get_authz_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_lb_traffic_extension(request) + client.get_authz_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_lb_traffic_extension_rest_required_fields( - request_type=dep.CreateLbTrafficExtensionRequest, +@pytest.mark.asyncio +async def test_get_authz_extension_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DepServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request_init["lb_traffic_extension_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped - assert "lbTrafficExtensionId" not in jsonified_request + # Ensure method has been cached + assert ( + client._client._transport.get_authz_extension + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_lb_traffic_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_authz_extension + ] = mock_rpc - # verify required fields with default values are now present - assert "lbTrafficExtensionId" in jsonified_request - assert ( - jsonified_request["lbTrafficExtensionId"] - == request_init["lb_traffic_extension_id"] - ) + request = {} + await client.get_authz_extension(request) - jsonified_request["parent"] = "parent_value" - jsonified_request["lbTrafficExtensionId"] = "lb_traffic_extension_id_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_lb_traffic_extension._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "lb_traffic_extension_id", - "request_id", - ) - ) - jsonified_request.update(unset_fields) + await client.get_authz_extension(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "lbTrafficExtensionId" in jsonified_request - assert jsonified_request["lbTrafficExtensionId"] == "lb_traffic_extension_id_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_authz_extension_async( + transport: str = "grpc_asyncio", request_type=dep.GetAuthzExtensionRequest +): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.AuthzExtension( + name="name_value", + description="description_value", + load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, + authority="authority_value", + service="service_value", + fail_open=True, + forward_headers=["forward_headers_value"], + wire_format=dep.WireFormat.EXT_PROC_GRPC, + ) + ) + response = await client.get_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dep.GetAuthzExtensionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dep.AuthzExtension) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.load_balancing_scheme == dep.LoadBalancingScheme.INTERNAL_MANAGED + assert response.authority == "authority_value" + assert response.service == "service_value" + assert response.fail_open is True + assert response.forward_headers == ["forward_headers_value"] + assert response.wire_format == dep.WireFormat.EXT_PROC_GRPC + + +@pytest.mark.asyncio +async def test_get_authz_extension_async_from_dict(): + await test_get_authz_extension_async(request_type=dict) + +def test_get_authz_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.GetAuthzExtensionRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + call.return_value = dep.AuthzExtension() + client.get_authz_extension(request) - response = client.create_lb_traffic_extension(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "lbTrafficExtensionId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_create_lb_traffic_extension_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_authz_extension_field_headers_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_lb_traffic_extension._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "lbTrafficExtensionId", - "requestId", - ) - ) - & set( - ( - "parent", - "lbTrafficExtensionId", - "lbTrafficExtension", - ) - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.GetAuthzExtensionRequest() + request.name = "name_value" -def test_create_lb_traffic_extension_rest_flattened(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dep.AuthzExtension()) + await client.get_authz_extension(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), - lb_traffic_extension_id="lb_traffic_extension_id_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +def test_get_authz_extension_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.create_lb_traffic_extension(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dep.AuthzExtension() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_authz_extension( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/lbTrafficExtensions" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_create_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): +def test_get_authz_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_lb_traffic_extension( - dep.CreateLbTrafficExtensionRequest(), - parent="parent_value", - lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), - lb_traffic_extension_id="lb_traffic_extension_id_value", + client.get_authz_extension( + dep.GetAuthzExtensionRequest(), + name="name_value", ) -def test_update_lb_traffic_extension_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_get_authz_extension_flattened_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dep.AuthzExtension() - # Ensure method has been cached - assert ( - client._transport.update_lb_traffic_extension - in client._transport._wrapped_methods + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dep.AuthzExtension()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_authz_extension( + name="name_value", ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.update_lb_traffic_extension - ] = mock_rpc - - request = {} - client.update_lb_traffic_extension(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_lb_traffic_extension(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_update_lb_traffic_extension_rest_required_fields( - request_type=dep.UpdateLbTrafficExtensionRequest, -): - transport_class = transports.DepServiceRestTransport - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_get_authz_extension_flattened_error_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_lb_traffic_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_lb_traffic_extension._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_authz_extension( + dep.GetAuthzExtensionRequest(), + name="name_value", ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone +@pytest.mark.parametrize( + "request_type", + [ + dep.CreateAuthzExtensionRequest, + dict, + ], +) +def test_create_authz_extension(request_type, transport: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.update_lb_traffic_extension(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_authz_extension(request) -def test_update_lb_traffic_extension_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dep.CreateAuthzExtensionRequest() + assert args[0] == request - unset_fields = transport.update_lb_traffic_extension._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set(("lbTrafficExtension",)) - ) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_update_lb_traffic_extension_rest_flattened(): +def test_create_authz_extension_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "lb_traffic_extension": { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_lb_traffic_extension(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{lb_traffic_extension.name=projects/*/locations/*/lbTrafficExtensions/*}" - % client.transport._host, - args[1], - ) - - -def test_update_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dep.CreateAuthzExtensionRequest( + parent="parent_value", + authz_extension_id="authz_extension_id_value", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_lb_traffic_extension( - dep.UpdateLbTrafficExtensionRequest(), - lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_authz_extension(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dep.CreateAuthzExtensionRequest( + parent="parent_value", + authz_extension_id="authz_extension_id_value", ) -def test_delete_lb_traffic_extension_rest_use_cached_wrapped_rpc(): +def test_create_authz_extension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -7913,7 +8011,7 @@ def test_delete_lb_traffic_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_lb_traffic_extension + client._transport.create_authz_extension in client._transport._wrapped_methods ) @@ -7923,183 +8021,359 @@ def test_delete_lb_traffic_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.delete_lb_traffic_extension + client._transport.create_authz_extension ] = mock_rpc - request = {} - client.delete_lb_traffic_extension(request) + client.create_authz_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_lb_traffic_extension(request) + client.create_authz_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_lb_traffic_extension_rest_required_fields( - request_type=dep.DeleteLbTrafficExtensionRequest, +@pytest.mark.asyncio +async def test_create_authz_extension_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DepServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_authz_extension + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_authz_extension + ] = mock_rpc + + request = {} + await client.create_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_authz_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_authz_extension_async( + transport: str = "grpc_asyncio", request_type=dep.CreateAuthzExtensionRequest +): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_lb_traffic_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_authz_extension(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dep.CreateAuthzExtensionRequest() + assert args[0] == request - jsonified_request["name"] = "name_value" + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_lb_traffic_extension._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_create_authz_extension_async_from_dict(): + await test_create_authz_extension_async(request_type=dict) + +def test_create_authz_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.CreateAuthzExtensionRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_authz_extension(request) - response = client.delete_lb_traffic_extension(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_delete_lb_traffic_extension_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_create_authz_extension_field_headers_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.delete_lb_traffic_extension._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.CreateAuthzExtensionRequest() + request.parent = "parent_value" -def test_delete_lb_traffic_extension_rest_flattened(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_authz_extension(request) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" - } + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +def test_create_authz_extension_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - client.delete_lb_traffic_extension(**mock_args) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_authz_extension( + parent="parent_value", + authz_extension=dep.AuthzExtension(name="name_value"), + authz_extension_id="authz_extension_id_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/lbTrafficExtensions/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].authz_extension + mock_val = dep.AuthzExtension(name="name_value") + assert arg == mock_val + arg = args[0].authz_extension_id + mock_val = "authz_extension_id_value" + assert arg == mock_val -def test_delete_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): +def test_create_authz_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_lb_traffic_extension( - dep.DeleteLbTrafficExtensionRequest(), - name="name_value", + client.create_authz_extension( + dep.CreateAuthzExtensionRequest(), + parent="parent_value", + authz_extension=dep.AuthzExtension(name="name_value"), + authz_extension_id="authz_extension_id_value", ) -def test_list_lb_route_extensions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_create_authz_extension_flattened_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_authz_extension( + parent="parent_value", + authz_extension=dep.AuthzExtension(name="name_value"), + authz_extension_id="authz_extension_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].authz_extension + mock_val = dep.AuthzExtension(name="name_value") + assert arg == mock_val + arg = args[0].authz_extension_id + mock_val = "authz_extension_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_authz_extension_flattened_error_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_authz_extension( + dep.CreateAuthzExtensionRequest(), + parent="parent_value", + authz_extension=dep.AuthzExtension(name="name_value"), + authz_extension_id="authz_extension_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + dep.UpdateAuthzExtensionRequest, + dict, + ], +) +def test_update_authz_extension(request_type, transport: str = "grpc"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dep.UpdateAuthzExtensionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_authz_extension_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dep.UpdateAuthzExtensionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_authz_extension(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dep.UpdateAuthzExtensionRequest() + + +def test_update_authz_extension_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert ( - client._transport.list_lb_route_extensions + client._transport.update_authz_extension in client._transport._wrapped_methods ) @@ -8109,436 +8383,344 @@ def test_list_lb_route_extensions_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_lb_route_extensions + client._transport.update_authz_extension ] = mock_rpc - request = {} - client.list_lb_route_extensions(request) + client.update_authz_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_lb_route_extensions(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_authz_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_lb_route_extensions_rest_required_fields( - request_type=dep.ListLbRouteExtensionsRequest, +@pytest.mark.asyncio +async def test_update_authz_extension_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DepServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.update_authz_extension + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_lb_route_extensions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_authz_extension + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.update_authz_extension(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_lb_route_extensions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + await client.update_authz_extension(request) - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Designate an appropriate value for the returned response. - return_value = dep.ListLbRouteExtensionsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 +@pytest.mark.asyncio +async def test_update_authz_extension_async( + transport: str = "grpc_asyncio", request_type=dep.UpdateAuthzExtensionRequest +): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # Convert return value to protobuf type - return_value = dep.ListLbRouteExtensionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_authz_extension(request) - response = client.list_lb_route_extensions(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dep.UpdateAuthzExtensionRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_list_lb_route_extensions_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.list_lb_route_extensions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) +@pytest.mark.asyncio +async def test_update_authz_extension_async_from_dict(): + await test_update_authz_extension_async(request_type=dict) -def test_list_lb_route_extensions_rest_flattened(): +def test_update_authz_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = dep.ListLbRouteExtensionsResponse() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dep.UpdateAuthzExtensionRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.authz_extension.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_authz_extension(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dep.ListLbRouteExtensionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.list_lb_route_extensions(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "authz_extension.name=name_value", + ) in kw["metadata"] - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/lbRouteExtensions" - % client.transport._host, - args[1], + +@pytest.mark.asyncio +async def test_update_authz_extension_field_headers_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.UpdateAuthzExtensionRequest() + + request.authz_extension.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.update_authz_extension(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_list_lb_route_extensions_rest_flattened_error(transport: str = "rest"): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "authz_extension.name=name_value", + ) in kw["metadata"] + + +def test_update_authz_extension_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_lb_route_extensions( - dep.ListLbRouteExtensionsRequest(), - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_authz_extension( + authz_extension=dep.AuthzExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].authz_extension + mock_val = dep.AuthzExtension(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + -def test_list_lb_route_extensions_rest_pager(transport: str = "rest"): +def test_update_authz_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dep.ListLbRouteExtensionsResponse( - lb_route_extensions=[ - dep.LbRouteExtension(), - dep.LbRouteExtension(), - dep.LbRouteExtension(), - ], - next_page_token="abc", - ), - dep.ListLbRouteExtensionsResponse( - lb_route_extensions=[], - next_page_token="def", - ), - dep.ListLbRouteExtensionsResponse( - lb_route_extensions=[ - dep.LbRouteExtension(), - ], - next_page_token="ghi", - ), - dep.ListLbRouteExtensionsResponse( - lb_route_extensions=[ - dep.LbRouteExtension(), - dep.LbRouteExtension(), - ], - ), + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_authz_extension( + dep.UpdateAuthzExtensionRequest(), + authz_extension=dep.AuthzExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dep.ListLbRouteExtensionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.list_lb_route_extensions(request=sample_request) +@pytest.mark.asyncio +async def test_update_authz_extension_flattened_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dep.LbRouteExtension) for i in results) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") - pages = list(client.list_lb_route_extensions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_authz_extension( + authz_extension=dep.AuthzExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].authz_extension + mock_val = dep.AuthzExtension(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_get_lb_route_extension_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.asyncio +async def test_update_authz_extension_flattened_error_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Ensure method has been cached - assert ( - client._transport.get_lb_route_extension - in client._transport._wrapped_methods + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_authz_extension( + dep.UpdateAuthzExtensionRequest(), + authz_extension=dep.AuthzExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.get_lb_route_extension - ] = mock_rpc - - request = {} - client.get_lb_route_extension(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.get_lb_route_extension(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_get_lb_route_extension_rest_required_fields( - request_type=dep.GetLbRouteExtensionRequest, -): - transport_class = transports.DepServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_lb_route_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_lb_route_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.parametrize( + "request_type", + [ + dep.DeleteAuthzExtensionRequest, + dict, + ], +) +def test_delete_authz_extension(request_type, transport: str = "grpc"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = dep.LbRouteExtension() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dep.LbRouteExtension.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_lb_route_extension(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_authz_extension(request) -def test_get_lb_route_extension_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = dep.DeleteAuthzExtensionRequest() + assert args[0] == request - unset_fields = transport.get_lb_route_extension._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_get_lb_route_extension_rest_flattened(): +def test_delete_authz_extension_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = dep.LbRouteExtension() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dep.LbRouteExtension.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_lb_route_extension(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/lbRouteExtensions/*}" - % client.transport._host, - args[1], - ) - - -def test_get_lb_route_extension_rest_flattened_error(transport: str = "rest"): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = dep.DeleteAuthzExtensionRequest( + name="name_value", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_lb_route_extension( - dep.GetLbRouteExtensionRequest(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_authz_extension(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dep.DeleteAuthzExtensionRequest( name="name_value", ) -def test_create_lb_route_extension_rest_use_cached_wrapped_rpc(): +def test_delete_authz_extension_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -8547,7 +8729,7 @@ def test_create_lb_route_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_lb_route_extension + client._transport.delete_authz_extension in client._transport._wrapped_methods ) @@ -8557,220 +8739,278 @@ def test_create_lb_route_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_lb_route_extension + client._transport.delete_authz_extension ] = mock_rpc - request = {} - client.create_lb_route_extension(request) + client.delete_authz_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_lb_route_extension(request) + client.delete_authz_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_lb_route_extension_rest_required_fields( - request_type=dep.CreateLbRouteExtensionRequest, +@pytest.mark.asyncio +async def test_delete_authz_extension_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.DepServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["lb_route_extension_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped - assert "lbRouteExtensionId" not in jsonified_request + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_lb_route_extension._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.delete_authz_extension + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present - assert "lbRouteExtensionId" in jsonified_request - assert ( - 
jsonified_request["lbRouteExtensionId"] == request_init["lb_route_extension_id"] - ) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_authz_extension + ] = mock_rpc - jsonified_request["parent"] = "parent_value" - jsonified_request["lbRouteExtensionId"] = "lb_route_extension_id_value" + request = {} + await client.delete_authz_extension(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_lb_route_extension._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "lb_route_extension_id", - "request_id", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_authz_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_authz_extension_async( + transport: str = "grpc_asyncio", request_type=dep.DeleteAuthzExtensionRequest +): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "lbRouteExtensionId" in jsonified_request - assert jsonified_request["lbRouteExtensionId"] == "lb_route_extension_id_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = dep.DeleteAuthzExtensionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + +@pytest.mark.asyncio +async def test_delete_authz_extension_async_from_dict(): + await test_delete_authz_extension_async(request_type=dict) + + +def test_delete_authz_extension_field_headers(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.DeleteAuthzExtensionRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_authz_extension(request) - response = client.create_lb_route_extension(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "lbRouteExtensionId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_create_lb_route_extension_rest_unset_required_fields(): - transport = transports.DepServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_authz_extension_field_headers_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_lb_route_extension._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "lbRouteExtensionId", - "requestId", - ) - ) - & set( - ( - "parent", - "lbRouteExtensionId", - "lbRouteExtension", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dep.DeleteAuthzExtensionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - ) + await client.delete_authz_extension(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_create_lb_route_extension_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_authz_extension_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - lb_route_extension=dep.LbRouteExtension(name="name_value"), - lb_route_extension_id="lb_route_extension_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_authz_extension( + name="name_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_lb_route_extension(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/lbRouteExtensions" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_create_lb_route_extension_rest_flattened_error(transport: str = "rest"): +def test_delete_authz_extension_flattened_error(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_lb_route_extension( - dep.CreateLbRouteExtensionRequest(), - parent="parent_value", - lb_route_extension=dep.LbRouteExtension(name="name_value"), - lb_route_extension_id="lb_route_extension_id_value", + client.delete_authz_extension( + dep.DeleteAuthzExtensionRequest(), + name="name_value", ) -def test_update_lb_route_extension_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_delete_authz_extension_flattened_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") - # Ensure method has been cached + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_authz_extension( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_authz_extension_flattened_error_async(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_authz_extension( + dep.DeleteAuthzExtensionRequest(), + name="name_value", + ) + + +def test_list_lb_traffic_extensions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached assert ( - client._transport.update_lb_route_extension + client._transport.list_lb_traffic_extensions in client._transport._wrapped_methods ) @@ -8780,32 +9020,29 @@ def test_update_lb_route_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_lb_route_extension + client._transport.list_lb_traffic_extensions ] = mock_rpc request = {} - client.update_lb_route_extension(request) + client.list_lb_traffic_extensions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_lb_route_extension(request) + client.list_lb_traffic_extensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_lb_route_extension_rest_required_fields( - request_type=dep.UpdateLbRouteExtensionRequest, +def test_list_lb_traffic_extensions_rest_required_fields( + request_type=dep.ListLbTrafficExtensionsRequest, ): transport_class = transports.DepServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -8816,24 +9053,30 @@ def test_update_lb_route_extension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_lb_route_extension._get_unset_required_fields(jsonified_request) + ).list_lb_traffic_extensions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_lb_route_extension._get_unset_required_fields(jsonified_request) + ).list_lb_traffic_extensions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8842,7 +9085,7 @@ def test_update_lb_route_extension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.ListLbTrafficExtensionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -8854,45 +9097,49 @@ def test_update_lb_route_extension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.ListLbTrafficExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_lb_route_extension(request) + response = client.list_lb_traffic_extensions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_lb_route_extension_rest_unset_required_fields(): +def test_list_lb_traffic_extensions_rest_unset_required_fields(): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_lb_route_extension._get_unset_required_fields({}) + unset_fields = transport.list_lb_traffic_extensions._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - & set(("lbRouteExtension",)) + & set(("parent",)) ) -def test_update_lb_route_extension_rest_flattened(): +def test_list_lb_traffic_extensions_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -8901,44 +9148,41 @@ def test_update_lb_route_extension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.ListLbTrafficExtensionsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "lb_route_extension": { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - lb_route_extension=dep.LbRouteExtension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.ListLbTrafficExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_lb_route_extension(**mock_args) + client.list_lb_traffic_extensions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{lb_route_extension.name=projects/*/locations/*/lbRouteExtensions/*}" + "%s/v1/{parent=projects/*/locations/*}/lbTrafficExtensions" % client.transport._host, args[1], ) -def test_update_lb_route_extension_rest_flattened_error(transport: str = "rest"): +def test_list_lb_traffic_extensions_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8947,14 +9191,76 @@ def test_update_lb_route_extension_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_lb_route_extension( - dep.UpdateLbRouteExtensionRequest(), - lb_route_extension=dep.LbRouteExtension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_lb_traffic_extensions( + dep.ListLbTrafficExtensionsRequest(), + parent="parent_value", ) -def test_delete_lb_route_extension_rest_use_cached_wrapped_rpc(): +def test_list_lb_traffic_extensions_rest_pager(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dep.ListLbTrafficExtensionsResponse( + lb_traffic_extensions=[ + dep.LbTrafficExtension(), + dep.LbTrafficExtension(), + dep.LbTrafficExtension(), + ], + next_page_token="abc", + ), + dep.ListLbTrafficExtensionsResponse( + lb_traffic_extensions=[], + next_page_token="def", + ), + dep.ListLbTrafficExtensionsResponse( + lb_traffic_extensions=[ + dep.LbTrafficExtension(), + ], + next_page_token="ghi", + ), + dep.ListLbTrafficExtensionsResponse( + lb_traffic_extensions=[ + dep.LbTrafficExtension(), + dep.LbTrafficExtension(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + dep.ListLbTrafficExtensionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_lb_traffic_extensions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dep.LbTrafficExtension) for i in results) + + pages = list(client.list_lb_traffic_extensions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_lb_traffic_extension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8969,7 +9275,7 @@ def test_delete_lb_route_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_lb_route_extension + 
client._transport.get_lb_traffic_extension in client._transport._wrapped_methods ) @@ -8979,28 +9285,24 @@ def test_delete_lb_route_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_lb_route_extension + client._transport.get_lb_traffic_extension ] = mock_rpc request = {} - client.delete_lb_route_extension(request) + client.get_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_lb_route_extension(request) + client.get_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_lb_route_extension_rest_required_fields( - request_type=dep.DeleteLbRouteExtensionRequest, +def test_get_lb_traffic_extension_rest_required_fields( + request_type=dep.GetLbTrafficExtensionRequest, ): transport_class = transports.DepServiceRestTransport @@ -9016,7 +9318,7 @@ def test_delete_lb_route_extension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_lb_route_extension._get_unset_required_fields(jsonified_request) + ).get_lb_traffic_extension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9025,9 +9327,7 @@ def test_delete_lb_route_extension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_lb_route_extension._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + ).get_lb_traffic_extension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -9041,7 +9341,7 @@ def test_delete_lb_route_extension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.LbTrafficExtension() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9053,36 +9353,39 @@ def test_delete_lb_route_extension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.LbTrafficExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_lb_route_extension(request) + response = client.get_lb_traffic_extension(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_lb_route_extension_rest_unset_required_fields(): +def test_get_lb_traffic_extension_rest_unset_required_fields(): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_lb_route_extension._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.get_lb_traffic_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_lb_route_extension_rest_flattened(): +def test_get_lb_traffic_extension_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9091,11 +9394,11 @@ def test_delete_lb_route_extension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.LbTrafficExtension() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" } # get truthy value for each flattened field @@ -9107,25 +9410,27 @@ def test_delete_lb_route_extension_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.LbTrafficExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_lb_route_extension(**mock_args) + client.get_lb_traffic_extension(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/lbRouteExtensions/*}" + "%s/v1/{name=projects/*/locations/*/lbTrafficExtensions/*}" % client.transport._host, args[1], ) -def test_delete_lb_route_extension_rest_flattened_error(transport: str = "rest"): +def test_get_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9134,13 +9439,13 @@ def test_delete_lb_route_extension_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_lb_route_extension( - dep.DeleteLbRouteExtensionRequest(), + client.get_lb_traffic_extension( + dep.GetLbTrafficExtensionRequest(), name="name_value", ) -def test_list_authz_extensions_rest_use_cached_wrapped_rpc(): +def test_create_lb_traffic_extension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9155,7 +9460,7 @@ def test_list_authz_extensions_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_authz_extensions + client._transport.create_lb_traffic_extension in client._transport._wrapped_methods ) @@ -9165,29 +9470,34 @@ def test_list_authz_extensions_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_authz_extensions + client._transport.create_lb_traffic_extension ] = mock_rpc request = {} - client.list_authz_extensions(request) + client.create_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_authz_extensions(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_authz_extensions_rest_required_fields( - request_type=dep.ListAuthzExtensionsRequest, +def test_create_lb_traffic_extension_rest_required_fields( + request_type=dep.CreateLbTrafficExtensionRequest, ): transport_class = transports.DepServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["lb_traffic_extension_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9195,26 +9505,31 @@ def test_list_authz_extensions_rest_required_fields( ) # verify fields with default values are dropped + assert "lbTrafficExtensionId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_authz_extensions._get_unset_required_fields(jsonified_request) + ).create_lb_traffic_extension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "lbTrafficExtensionId" in jsonified_request + assert ( + jsonified_request["lbTrafficExtensionId"] + == request_init["lb_traffic_extension_id"] + ) jsonified_request["parent"] = "parent_value" + jsonified_request["lbTrafficExtensionId"] = "lb_traffic_extension_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_authz_extensions._get_unset_required_fields(jsonified_request) + ).create_lb_traffic_extension._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "lb_traffic_extension_id", + "request_id", ) ) jsonified_request.update(unset_fields) @@ -9222,6 +9537,8 @@ def test_list_authz_extensions_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "lbTrafficExtensionId" in jsonified_request + assert jsonified_request["lbTrafficExtensionId"] == "lb_traffic_extension_id_value" client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9230,7 +9547,7 @@ def test_list_authz_extensions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = dep.ListAuthzExtensionsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9242,49 +9559,57 @@ def test_list_authz_extensions_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dep.ListAuthzExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_authz_extensions(request) + response = client.create_lb_traffic_extension(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "lbTrafficExtensionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_authz_extensions_rest_unset_required_fields(): +def test_create_lb_traffic_extension_rest_unset_required_fields(): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_authz_extensions._get_unset_required_fields({}) + unset_fields = transport.create_lb_traffic_extension._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "lbTrafficExtensionId", + "requestId", + ) + ) + & set( + ( + "parent", + "lbTrafficExtensionId", + "lbTrafficExtension", ) ) - & set(("parent",)) ) -def test_list_authz_extensions_rest_flattened(): +def test_create_lb_traffic_extension_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9293,7 +9618,7 @@ def test_list_authz_extensions_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = dep.ListAuthzExtensionsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -9301,33 +9626,33 @@ def test_list_authz_extensions_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), + lb_traffic_extension_id="lb_traffic_extension_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dep.ListAuthzExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_authz_extensions(**mock_args) + client.create_lb_traffic_extension(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/authzExtensions" + "%s/v1/{parent=projects/*/locations/*}/lbTrafficExtensions" % client.transport._host, args[1], ) -def test_list_authz_extensions_rest_flattened_error(transport: str = "rest"): +def test_create_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9336,74 +9661,15 @@ def test_list_authz_extensions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_authz_extensions( - dep.ListAuthzExtensionsRequest(), + client.create_lb_traffic_extension( + dep.CreateLbTrafficExtensionRequest(), parent="parent_value", + lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), + lb_traffic_extension_id="lb_traffic_extension_id_value", ) -def test_list_authz_extensions_rest_pager(transport: str = "rest"): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), - dep.AuthzExtension(), - ], - next_page_token="abc", - ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[], - next_page_token="def", - ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - ], - next_page_token="ghi", - ), - dep.ListAuthzExtensionsResponse( - authz_extensions=[ - dep.AuthzExtension(), - dep.AuthzExtension(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(dep.ListAuthzExtensionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_authz_extensions(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, dep.AuthzExtension) for i in results) - - pages = list(client.list_authz_extensions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_authz_extension_rest_use_cached_wrapped_rpc(): +def test_update_lb_traffic_extension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9418,7 +9684,8 @@ def test_get_authz_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_authz_extension in client._transport._wrapped_methods + client._transport.update_lb_traffic_extension + in client._transport._wrapped_methods ) # Replace cached wrapped function with 
mock @@ -9427,29 +9694,32 @@ def test_get_authz_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_authz_extension + client._transport.update_lb_traffic_extension ] = mock_rpc request = {} - client.get_authz_extension(request) + client.update_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_authz_extension(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_authz_extension_rest_required_fields( - request_type=dep.GetAuthzExtensionRequest, +def test_update_lb_traffic_extension_rest_required_fields( + request_type=dep.UpdateLbTrafficExtensionRequest, ): transport_class = transports.DepServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9460,21 +9730,24 @@ def test_get_authz_extension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_authz_extension._get_unset_required_fields(jsonified_request) + ).update_lb_traffic_extension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_authz_extension._get_unset_required_fields(jsonified_request) + ).update_lb_traffic_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9483,7 +9756,7 @@ def test_get_authz_extension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = dep.AuthzExtension() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9495,39 +9768,45 @@ def test_get_authz_extension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = dep.AuthzExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_authz_extension(request) + response = client.update_lb_traffic_extension(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_authz_extension_rest_unset_required_fields(): +def test_update_lb_traffic_extension_rest_unset_required_fields(): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_authz_extension._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_lb_traffic_extension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("lbTrafficExtension",)) + ) -def test_get_authz_extension_rest_flattened(): +def test_update_lb_traffic_extension_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9536,43 +9815,44 @@ def test_get_authz_extension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = dep.AuthzExtension() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/authzExtensions/sample3" + "lb_traffic_extension": { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = dep.AuthzExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_authz_extension(**mock_args) + client.update_lb_traffic_extension(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/authzExtensions/*}" + "%s/v1/{lb_traffic_extension.name=projects/*/locations/*/lbTrafficExtensions/*}" % client.transport._host, args[1], ) -def test_get_authz_extension_rest_flattened_error(transport: str = "rest"): +def test_update_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9581,13 +9861,14 @@ def test_get_authz_extension_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_authz_extension( - dep.GetAuthzExtensionRequest(), - name="name_value", + client.update_lb_traffic_extension( + dep.UpdateLbTrafficExtensionRequest(), + lb_traffic_extension=dep.LbTrafficExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_authz_extension_rest_use_cached_wrapped_rpc(): +def test_delete_lb_traffic_extension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9602,7 +9883,7 @@ def test_create_authz_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_authz_extension + client._transport.delete_lb_traffic_extension in client._transport._wrapped_methods ) @@ -9612,11 +9893,11 @@ def test_create_authz_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_authz_extension + client._transport.delete_lb_traffic_extension ] = mock_rpc request = {} - client.create_authz_extension(request) + client.delete_lb_traffic_extension(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -9625,21 +9906,20 @@ def test_create_authz_extension_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_authz_extension(request) + client.delete_lb_traffic_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_authz_extension_rest_required_fields( - request_type=dep.CreateAuthzExtensionRequest, +def test_delete_lb_traffic_extension_rest_required_fields( + request_type=dep.DeleteLbTrafficExtensionRequest, ): transport_class = transports.DepServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["authz_extension_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9647,37 +9927,26 @@ def test_create_authz_extension_rest_required_fields( ) # verify fields with default values are dropped - assert "authzExtensionId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_authz_extension._get_unset_required_fields(jsonified_request) + ).delete_lb_traffic_extension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "authzExtensionId" in jsonified_request - assert jsonified_request["authzExtensionId"] == request_init["authz_extension_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["authzExtensionId"] = "authz_extension_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_authz_extension._get_unset_required_fields(jsonified_request) + ).delete_lb_traffic_extension._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "authz_extension_id", - "request_id", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "authzExtensionId" in jsonified_request - assert jsonified_request["authzExtensionId"] == "authz_extension_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9698,10 +9967,9 @@ def test_create_authz_extension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -9712,43 +9980,23 @@ def test_create_authz_extension_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_authz_extension(request) + response = client.delete_lb_traffic_extension(request) - expected_params = [ - ( - "authzExtensionId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_authz_extension_rest_unset_required_fields(): +def test_delete_lb_traffic_extension_rest_unset_required_fields(): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_authz_extension._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "authzExtensionId", - "requestId", - ) - ) - & set( - ( - "parent", - "authzExtensionId", - "authzExtension", - ) - ) - ) + unset_fields = transport.delete_lb_traffic_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) -def test_create_authz_extension_rest_flattened(): +def test_delete_lb_traffic_extension_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9760,13 +10008,13 @@ def test_create_authz_extension_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - authz_extension=dep.AuthzExtension(name="name_value"), - authz_extension_id="authz_extension_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -9778,20 +10026,20 @@ def test_create_authz_extension_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_authz_extension(**mock_args) + client.delete_lb_traffic_extension(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/authzExtensions" + "%s/v1/{name=projects/*/locations/*/lbTrafficExtensions/*}" % client.transport._host, args[1], ) -def test_create_authz_extension_rest_flattened_error(transport: str = "rest"): +def test_delete_lb_traffic_extension_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9800,15 +10048,13 @@ def test_create_authz_extension_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_authz_extension( - dep.CreateAuthzExtensionRequest(), - parent="parent_value", - authz_extension=dep.AuthzExtension(name="name_value"), - authz_extension_id="authz_extension_id_value", + client.delete_lb_traffic_extension( + dep.DeleteLbTrafficExtensionRequest(), + name="name_value", ) -def test_update_authz_extension_rest_use_cached_wrapped_rpc(): +def test_list_lb_route_extensions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9823,7 +10069,7 @@ def test_update_authz_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_authz_extension + client._transport.list_lb_route_extensions in client._transport._wrapped_methods ) @@ -9833,32 +10079,29 @@ def test_update_authz_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_authz_extension + client._transport.list_lb_route_extensions ] = mock_rpc request = {} - client.update_authz_extension(request) + client.list_lb_route_extensions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_authz_extension(request) + client.list_lb_route_extensions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_authz_extension_rest_required_fields( - request_type=dep.UpdateAuthzExtensionRequest, +def test_list_lb_route_extensions_rest_required_fields( + request_type=dep.ListLbRouteExtensionsRequest, ): transport_class = transports.DepServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -9869,24 +10112,30 @@ def test_update_authz_extension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_authz_extension._get_unset_required_fields(jsonified_request) + ).list_lb_route_extensions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_authz_extension._get_unset_required_fields(jsonified_request) + ).list_lb_route_extensions._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9895,7 +10144,7 @@ def test_update_authz_extension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.ListLbRouteExtensionsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -9907,50 +10156,49 @@ def test_update_authz_extension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.ListLbRouteExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_authz_extension(request) + response = client.list_lb_route_extensions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_authz_extension_rest_unset_required_fields(): +def test_list_lb_route_extensions_rest_unset_required_fields(): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_authz_extension._get_unset_required_fields({}) + unset_fields = transport.list_lb_route_extensions._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "authzExtension", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_update_authz_extension_rest_flattened(): +def test_list_lb_route_extensions_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -9959,44 +10207,41 @@ def test_update_authz_extension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.ListLbRouteExtensionsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "authz_extension": { - "name": "projects/sample1/locations/sample2/authzExtensions/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - authz_extension=dep.AuthzExtension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.ListLbRouteExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_authz_extension(**mock_args) + client.list_lb_route_extensions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{authz_extension.name=projects/*/locations/*/authzExtensions/*}" + "%s/v1/{parent=projects/*/locations/*}/lbRouteExtensions" % client.transport._host, args[1], ) -def test_update_authz_extension_rest_flattened_error(transport: str = "rest"): +def test_list_lb_route_extensions_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10005,14 +10250,74 @@ def test_update_authz_extension_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_authz_extension( - dep.UpdateAuthzExtensionRequest(), - authz_extension=dep.AuthzExtension(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_lb_route_extensions( + dep.ListLbRouteExtensionsRequest(), + parent="parent_value", ) -def test_delete_authz_extension_rest_use_cached_wrapped_rpc(): +def test_list_lb_route_extensions_rest_pager(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dep.ListLbRouteExtensionsResponse( + lb_route_extensions=[ + dep.LbRouteExtension(), + dep.LbRouteExtension(), + dep.LbRouteExtension(), + ], + next_page_token="abc", + ), + dep.ListLbRouteExtensionsResponse( + lb_route_extensions=[], + next_page_token="def", + ), + dep.ListLbRouteExtensionsResponse( + lb_route_extensions=[ + dep.LbRouteExtension(), + ], + next_page_token="ghi", + ), + dep.ListLbRouteExtensionsResponse( + lb_route_extensions=[ + dep.LbRouteExtension(), + dep.LbRouteExtension(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dep.ListLbRouteExtensionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_lb_route_extensions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dep.LbRouteExtension) for i in results) + + pages = list(client.list_lb_route_extensions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_lb_route_extension_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10027,7 +10332,7 @@ def test_delete_authz_extension_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_authz_extension + client._transport.get_lb_route_extension in client._transport._wrapped_methods ) @@ -10037,28 +10342,24 
@@ def test_delete_authz_extension_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_authz_extension + client._transport.get_lb_route_extension ] = mock_rpc request = {} - client.delete_authz_extension(request) + client.get_lb_route_extension(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_authz_extension(request) + client.get_lb_route_extension(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_authz_extension_rest_required_fields( - request_type=dep.DeleteAuthzExtensionRequest, +def test_get_lb_route_extension_rest_required_fields( + request_type=dep.GetLbRouteExtensionRequest, ): transport_class = transports.DepServiceRestTransport @@ -10074,7 +10375,7 @@ def test_delete_authz_extension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_authz_extension._get_unset_required_fields(jsonified_request) + ).get_lb_route_extension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -10083,9 +10384,7 @@ def test_delete_authz_extension_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_authz_extension._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + ).get_lb_route_extension._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10099,7 +10398,7 @@ def test_delete_authz_extension_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.LbRouteExtension() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -10111,36 +10410,39 @@ def test_delete_authz_extension_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.LbRouteExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_authz_extension(request) + response = client.get_lb_route_extension(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_authz_extension_rest_unset_required_fields(): +def test_get_lb_route_extension_rest_unset_required_fields(): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_authz_extension._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.get_lb_route_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_authz_extension_rest_flattened(): +def test_get_lb_route_extension_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -10149,11 +10451,11 @@ def test_delete_authz_extension_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = dep.LbRouteExtension() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/authzExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" } # get truthy value for each flattened field @@ -10165,25 +10467,27 @@ def test_delete_authz_extension_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.LbRouteExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_authz_extension(**mock_args) + client.get_lb_route_extension(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/authzExtensions/*}" + "%s/v1/{name=projects/*/locations/*/lbRouteExtensions/*}" % client.transport._host, args[1], ) -def test_delete_authz_extension_rest_flattened_error(transport: str = "rest"): +def test_get_lb_route_extension_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10192,919 +10496,4756 @@ def test_delete_authz_extension_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_authz_extension( - dep.DeleteAuthzExtensionRequest(), + client.get_lb_route_extension( + dep.GetLbRouteExtensionRequest(), name="name_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DepServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_create_lb_route_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DepServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DepServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. - transport = transports.DepServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DepServiceClient( - client_options=options, - transport=transport, + # Ensure method has been cached + assert ( + client._transport.create_lb_route_extension + in client._transport._wrapped_methods ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DepServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.create_lb_route_extension + ] = mock_rpc - # It is an error to provide scopes and a transport instance. - transport = transports.DepServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DepServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + request = {} + client.create_lb_route_extension(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DepServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DepServiceClient(transport=transport) - assert client.transport is transport + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + client.create_lb_route_extension(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DepServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - transport = transports.DepServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel +def test_create_lb_route_extension_rest_required_fields( + request_type=dep.CreateLbRouteExtensionRequest, +): + transport_class = transports.DepServiceRestTransport -@pytest.mark.parametrize( - "transport_class", - [ - transports.DepServiceGrpcTransport, - transports.DepServiceGrpcAsyncIOTransport, - transports.DepServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request_init = {} + request_init["parent"] = "" + request_init["lb_route_extension_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped + assert "lbRouteExtensionId" not in jsonified_request -def test_transport_kind_grpc(): - transport = DepServiceClient.get_transport_class("grpc")( + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() + ).create_lb_route_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "lbRouteExtensionId" in jsonified_request + assert ( + jsonified_request["lbRouteExtensionId"] == request_init["lb_route_extension_id"] ) - assert transport.kind == "grpc" + jsonified_request["parent"] = "parent_value" + jsonified_request["lbRouteExtensionId"] = "lb_route_extension_id_value" -def test_initialize_client_w_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_lb_route_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "lb_route_extension_id", + "request_id", + ) ) - assert client is not None + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "lbRouteExtensionId" in jsonified_request + assert jsonified_request["lbRouteExtensionId"] == "lb_route_extension_id_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_lb_traffic_extensions_empty_call_grpc(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lb_traffic_extensions), "__call__" - ) as call: - call.return_value = dep.ListLbTrafficExtensionsResponse() - client.list_lb_traffic_extensions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.ListLbTrafficExtensionsRequest() - - assert args[0] == request_msg - + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_lb_traffic_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lb_traffic_extension), "__call__" - ) as call: - call.return_value = dep.LbTrafficExtension() - client.get_lb_traffic_extension(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.GetLbTrafficExtensionRequest() + response = client.create_lb_route_extension(request) - assert args[0] == request_msg + expected_params = [ + ( + "lbRouteExtensionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_lb_traffic_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_create_lb_route_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lb_traffic_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_lb_traffic_extension(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.CreateLbTrafficExtensionRequest() - - assert args[0] == request_msg + unset_fields = transport.create_lb_route_extension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "lbRouteExtensionId", + "requestId", + ) + ) + & set( + ( + "parent", + "lbRouteExtensionId", + "lbRouteExtension", + ) + ) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_lb_traffic_extension_empty_call_grpc(): +def test_create_lb_route_extension_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lb_traffic_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_lb_traffic_extension(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.UpdateLbTrafficExtensionRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_lb_traffic_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + lb_route_extension=dep.LbRouteExtension(name="name_value"), + lb_route_extension_id="lb_route_extension_id_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lb_traffic_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_lb_traffic_extension(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.DeleteLbTrafficExtensionRequest() + client.create_lb_route_extension(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/lbRouteExtensions" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_lb_route_extensions_empty_call_grpc(): +def test_create_lb_route_extension_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lb_route_extensions), "__call__" - ) as call: - call.return_value = dep.ListLbRouteExtensionsResponse() - client.list_lb_route_extensions(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.ListLbRouteExtensionsRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_lb_route_extension( + dep.CreateLbRouteExtensionRequest(), + parent="parent_value", + lb_route_extension=dep.LbRouteExtension(name="name_value"), + lb_route_extension_id="lb_route_extension_id_value", + ) - assert args[0] == request_msg +def test_update_lb_route_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_lb_route_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lb_route_extension), "__call__" - ) as call: - call.return_value = dep.LbRouteExtension() - client.get_lb_route_extension(request=None) + # Ensure method has been cached + assert ( + client._transport.update_lb_route_extension + in client._transport._wrapped_methods + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.GetLbRouteExtensionRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_lb_route_extension + ] = mock_rpc - assert args[0] == request_msg + request = {} + client.update_lb_route_extension(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_lb_route_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lb_route_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_lb_route_extension(request=None) + client.update_lb_route_extension(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.CreateLbRouteExtensionRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_update_lb_route_extension_rest_required_fields( + request_type=dep.UpdateLbRouteExtensionRequest, +): + transport_class = transports.DepServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_lb_route_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lb_route_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_lb_route_extension(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.UpdateLbRouteExtensionRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_lb_route_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_lb_route_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_lb_route_extension_empty_call_grpc(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_lb_route_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_lb_route_extension(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.DeleteLbRouteExtensionRequest() - - assert args[0] == request_msg - + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_authz_extensions_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" - ) as call: - call.return_value = dep.ListAuthzExtensionsResponse() - client.list_authz_extensions(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.ListAuthzExtensionsRequest() + response = client.update_lb_route_extension(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_authz_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_update_lb_route_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" - ) as call: - call.return_value = dep.AuthzExtension() - client.get_authz_extension(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.GetAuthzExtensionRequest() - - assert args[0] == request_msg + unset_fields = transport.update_lb_route_extension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("lbRouteExtension",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_authz_extension_empty_call_grpc(): +def test_update_lb_route_extension_rest_flattened(): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_authz_extension(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.CreateAuthzExtensionRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "lb_route_extension": { + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + } + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_authz_extension_empty_call_grpc(): - client = DepServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + lb_route_extension=dep.LbRouteExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_authz_extension(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.UpdateAuthzExtensionRequest() + client.update_lb_route_extension(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{lb_route_extension.name=projects/*/locations/*/lbRouteExtensions/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_authz_extension_empty_call_grpc(): +def test_update_lb_route_extension_rest_flattened_error(transport: str = "rest"): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_authz_extension(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_lb_route_extension( + dep.UpdateLbRouteExtensionRequest(), + lb_route_extension=dep.LbRouteExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.DeleteAuthzExtensionRequest() - assert args[0] == request_msg +def test_delete_lb_route_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -def test_transport_kind_grpc_asyncio(): - transport = DepServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Ensure method has been cached + assert ( + client._transport.delete_lb_route_extension + in client._transport._wrapped_methods + ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_lb_route_extension + ] = mock_rpc -def test_initialize_client_w_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None + request = {} + client.delete_lb_route_extension(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_lb_traffic_extensions_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lb_traffic_extensions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.ListLbTrafficExtensionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_lb_traffic_extensions(request=None) + client.delete_lb_route_extension(request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.ListLbTrafficExtensionsRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_delete_lb_route_extension_rest_required_fields( + request_type=dep.DeleteLbRouteExtensionRequest, +): + transport_class = transports.DepServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_lb_traffic_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_lb_traffic_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.LbTrafficExtension( - name="name_value", - description="description_value", - forwarding_rules=["forwarding_rules_value"], - load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, - ) - ) - await client.get_lb_traffic_extension(request=None) + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.GetLbTrafficExtensionRequest() + # verify fields with default values are dropped - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_lb_route_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_lb_traffic_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + jsonified_request["name"] = "name_value" - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lb_traffic_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_lb_traffic_extension(request=None) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_lb_route_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.CreateLbTrafficExtensionRequest() + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - assert args[0] == request_msg + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_lb_traffic_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lb_traffic_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_lb_traffic_extension(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.UpdateLbTrafficExtensionRequest() + response = client.delete_lb_route_extension(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_lb_traffic_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_lb_route_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lb_traffic_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_lb_traffic_extension(request=None) + unset_fields = transport.delete_lb_route_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.DeleteLbTrafficExtensionRequest() - assert args[0] == request_msg +def test_delete_lb_route_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_lb_route_extensions_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + } - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_lb_route_extensions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.ListLbRouteExtensionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.list_lb_route_extensions(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.ListLbRouteExtensionsRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.delete_lb_route_extension(**mock_args) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/lbRouteExtensions/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_lb_route_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_delete_lb_route_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_lb_route_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.LbRouteExtension( - name="name_value", - description="description_value", - forwarding_rules=["forwarding_rules_value"], - load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_lb_route_extension( + dep.DeleteLbRouteExtensionRequest(), + name="name_value", ) - await client.get_lb_route_extension(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.GetLbRouteExtensionRequest() - assert args[0] == request_msg +def test_list_lb_edge_extensions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_lb_route_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert ( + client._transport.list_lb_edge_extensions + in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_lb_route_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.create_lb_route_extension(request=None) + client._transport._wrapped_methods[ + client._transport.list_lb_edge_extensions + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.CreateLbRouteExtensionRequest() + request = {} + client.list_lb_edge_extensions(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_lb_edge_extensions(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_lb_route_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_lb_edge_extensions_rest_required_fields( + request_type=dep.ListLbEdgeExtensionsRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_lb_route_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_lb_route_extension(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.UpdateLbRouteExtensionRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_lb_edge_extensions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_lb_route_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_lb_edge_extensions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) ) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_lb_route_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_lb_route_extension(request=None) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.DeleteLbRouteExtensionRequest() + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = dep.ListLbEdgeExtensionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_authz_extensions_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = dep.ListLbEdgeExtensionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_authz_extensions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.ListAuthzExtensionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_authz_extensions(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.ListAuthzExtensionsRequest() + response = client.list_lb_edge_extensions(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_authz_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_lb_edge_extensions_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_authz_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - dep.AuthzExtension( - name="name_value", - description="description_value", - load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, - authority="authority_value", - service="service_value", - fail_open=True, - forward_headers=["forward_headers_value"], - wire_format=dep.WireFormat.EXT_PROC_GRPC, + unset_fields = transport.list_lb_edge_extensions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - await client.get_authz_extension(request=None) + & set(("parent",)) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.GetAuthzExtensionRequest() - assert args[0] == request_msg +def test_list_lb_edge_extensions_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dep.ListLbEdgeExtensionsResponse() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_authz_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_authz_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", ) - await client.create_authz_extension(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.CreateAuthzExtensionRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.ListLbEdgeExtensionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.list_lb_edge_extensions(**mock_args) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/lbEdgeExtensions" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_authz_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + +def test_list_lb_edge_extensions_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_lb_edge_extensions( + dep.ListLbEdgeExtensionsRequest(), + parent="parent_value", + ) + + +def test_list_lb_edge_extensions_rest_pager(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), + ], + next_page_token="abc", + ), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[], + next_page_token="def", + ), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + ], + next_page_token="ghi", + ), + dep.ListLbEdgeExtensionsResponse( + lb_edge_extensions=[ + dep.LbEdgeExtension(), + dep.LbEdgeExtension(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dep.ListLbEdgeExtensionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_lb_edge_extensions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dep.LbEdgeExtension) for i in results) + + pages = list(client.list_lb_edge_extensions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_lb_edge_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_lb_edge_extension + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_lb_edge_extension + ] = mock_rpc + + request = {} + client.get_lb_edge_extension(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_lb_edge_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_lb_edge_extension_rest_required_fields( + request_type=dep.GetLbEdgeExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_lb_edge_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_lb_edge_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dep.LbEdgeExtension() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.LbEdgeExtension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_lb_edge_extension(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_lb_edge_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_lb_edge_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_lb_edge_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dep.LbEdgeExtension() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.LbEdgeExtension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_lb_edge_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/lbEdgeExtensions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_lb_edge_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_lb_edge_extension( + dep.GetLbEdgeExtensionRequest(), + name="name_value", + ) + + +def test_create_lb_edge_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_lb_edge_extension + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_lb_edge_extension + ] = mock_rpc + + request = {} + client.create_lb_edge_extension(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_lb_edge_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_lb_edge_extension_rest_required_fields( + request_type=dep.CreateLbEdgeExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["lb_edge_extension_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "lbEdgeExtensionId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_lb_edge_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "lbEdgeExtensionId" in jsonified_request + assert ( + jsonified_request["lbEdgeExtensionId"] == request_init["lb_edge_extension_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["lbEdgeExtensionId"] = "lb_edge_extension_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_lb_edge_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "lb_edge_extension_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "lbEdgeExtensionId" in jsonified_request + assert jsonified_request["lbEdgeExtensionId"] == "lb_edge_extension_id_value" + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_lb_edge_extension(request) + + expected_params = [ + ( + "lbEdgeExtensionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_lb_edge_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_lb_edge_extension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "lbEdgeExtensionId", + "requestId", + ) + ) + & set( + ( + "parent", + "lbEdgeExtensionId", + "lbEdgeExtension", + ) + ) + ) + + +def test_create_lb_edge_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + lb_edge_extension_id="lb_edge_extension_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_lb_edge_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/lbEdgeExtensions" + % client.transport._host, + args[1], + ) + + +def test_create_lb_edge_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_lb_edge_extension( + dep.CreateLbEdgeExtensionRequest(), + parent="parent_value", + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + lb_edge_extension_id="lb_edge_extension_id_value", + ) + + +def test_update_lb_edge_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_lb_edge_extension + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_lb_edge_extension + ] = mock_rpc + + request = {} + client.update_lb_edge_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_lb_edge_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_lb_edge_extension_rest_required_fields( + request_type=dep.UpdateLbEdgeExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_lb_edge_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_lb_edge_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_lb_edge_extension(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_lb_edge_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_lb_edge_extension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("lbEdgeExtension",)) + ) + + +def test_update_lb_edge_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "lb_edge_extension": { + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_lb_edge_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{lb_edge_extension.name=projects/*/locations/*/lbEdgeExtensions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_lb_edge_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_lb_edge_extension( + dep.UpdateLbEdgeExtensionRequest(), + lb_edge_extension=dep.LbEdgeExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_lb_edge_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_lb_edge_extension + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_lb_edge_extension + ] = mock_rpc + + request = {} + client.delete_lb_edge_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_lb_edge_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_lb_edge_extension_rest_required_fields( + request_type=dep.DeleteLbEdgeExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_lb_edge_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_lb_edge_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_lb_edge_extension(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_lb_edge_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_lb_edge_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_lb_edge_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_lb_edge_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/lbEdgeExtensions/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_lb_edge_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_lb_edge_extension( + dep.DeleteLbEdgeExtensionRequest(), + name="name_value", + ) + + +def test_list_authz_extensions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_authz_extensions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_authz_extensions + ] = mock_rpc + + request = {} + client.list_authz_extensions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_authz_extensions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_authz_extensions_rest_required_fields( + request_type=dep.ListAuthzExtensionsRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_authz_extensions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_authz_extensions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dep.ListAuthzExtensionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.ListAuthzExtensionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_authz_extensions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_authz_extensions_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_authz_extensions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_authz_extensions_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dep.ListAuthzExtensionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.ListAuthzExtensionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_authz_extensions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/authzExtensions" + % client.transport._host, + args[1], + ) + + +def test_list_authz_extensions_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_authz_extensions( + dep.ListAuthzExtensionsRequest(), + parent="parent_value", + ) + + +def test_list_authz_extensions_rest_pager(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + next_page_token="abc", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[], + next_page_token="def", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + ], + next_page_token="ghi", + ), + dep.ListAuthzExtensionsResponse( + authz_extensions=[ + dep.AuthzExtension(), + dep.AuthzExtension(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(dep.ListAuthzExtensionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_authz_extensions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, dep.AuthzExtension) for i in results) + + pages = list(client.list_authz_extensions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_authz_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_authz_extension in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_authz_extension + ] = mock_rpc + + request = {} + client.get_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_authz_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_authz_extension_rest_required_fields( + request_type=dep.GetAuthzExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authz_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_authz_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = dep.AuthzExtension() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.AuthzExtension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_authz_extension(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_authz_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_authz_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_authz_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = dep.AuthzExtension() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/authzExtensions/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = dep.AuthzExtension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_authz_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/authzExtensions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_authz_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_authz_extension( + dep.GetAuthzExtensionRequest(), + name="name_value", + ) + + +def test_create_authz_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_authz_extension + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_authz_extension + ] = mock_rpc + + request = {} + client.create_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_authz_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_authz_extension_rest_required_fields( + request_type=dep.CreateAuthzExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["authz_extension_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "authzExtensionId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_authz_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "authzExtensionId" in jsonified_request + assert jsonified_request["authzExtensionId"] == request_init["authz_extension_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["authzExtensionId"] = "authz_extension_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_authz_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "authz_extension_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "authzExtensionId" in jsonified_request + assert jsonified_request["authzExtensionId"] == "authz_extension_id_value" + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_authz_extension(request) + + expected_params = [ + ( + "authzExtensionId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_authz_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_authz_extension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "authzExtensionId", + "requestId", + ) + ) + & set( + ( + "parent", + "authzExtensionId", + "authzExtension", + ) + ) + ) + + +def test_create_authz_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + authz_extension=dep.AuthzExtension(name="name_value"), + authz_extension_id="authz_extension_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_authz_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/authzExtensions" + % client.transport._host, + args[1], + ) + + +def test_create_authz_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_authz_extension( + dep.CreateAuthzExtensionRequest(), + parent="parent_value", + authz_extension=dep.AuthzExtension(name="name_value"), + authz_extension_id="authz_extension_id_value", + ) + + +def test_update_authz_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_authz_extension + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_authz_extension + ] = mock_rpc + + request = {} + client.update_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_authz_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_authz_extension_rest_required_fields( + request_type=dep.UpdateAuthzExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_authz_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_authz_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_authz_extension(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_authz_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_authz_extension._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "authzExtension", + ) + ) + ) + + +def test_update_authz_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "authz_extension": { + "name": "projects/sample1/locations/sample2/authzExtensions/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + authz_extension=dep.AuthzExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_authz_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{authz_extension.name=projects/*/locations/*/authzExtensions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_authz_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_authz_extension( + dep.UpdateAuthzExtensionRequest(), + authz_extension=dep.AuthzExtension(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_authz_extension_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_authz_extension + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_authz_extension + ] = mock_rpc + + request = {} + client.delete_authz_extension(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_authz_extension(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_authz_extension_rest_required_fields( + request_type=dep.DeleteAuthzExtensionRequest, +): + transport_class = transports.DepServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_authz_extension._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_authz_extension._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_authz_extension(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_authz_extension_rest_unset_required_fields(): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_authz_extension._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_authz_extension_rest_flattened(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/authzExtensions/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_authz_extension(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/authzExtensions/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_authz_extension_rest_flattened_error(transport: str = "rest"): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_authz_extension( + dep.DeleteAuthzExtensionRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DepServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DepServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DepServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DepServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DepServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DepServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DepServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DepServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DepServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DepServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DepServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DepServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DepServiceGrpcTransport, + transports.DepServiceGrpcAsyncIOTransport, + transports.DepServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DepServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_lb_traffic_extensions_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lb_traffic_extensions), "__call__" + ) as call: + call.return_value = dep.ListLbTrafficExtensionsResponse() + client.list_lb_traffic_extensions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListLbTrafficExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_lb_traffic_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lb_traffic_extension), "__call__" + ) as call: + call.return_value = dep.LbTrafficExtension() + client.get_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_lb_traffic_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lb_traffic_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_lb_traffic_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lb_traffic_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_lb_traffic_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lb_traffic_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_lb_route_extensions_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lb_route_extensions), "__call__" + ) as call: + call.return_value = dep.ListLbRouteExtensionsResponse() + client.list_lb_route_extensions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListLbRouteExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_lb_route_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lb_route_extension), "__call__" + ) as call: + call.return_value = dep.LbRouteExtension() + client.get_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_lb_route_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lb_route_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_lb_route_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lb_route_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_lb_route_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lb_route_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_lb_edge_extensions_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lb_edge_extensions), "__call__" + ) as call: + call.return_value = dep.ListLbEdgeExtensionsResponse() + client.list_lb_edge_extensions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListLbEdgeExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_lb_edge_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lb_edge_extension), "__call__" + ) as call: + call.return_value = dep.LbEdgeExtension() + client.get_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_lb_edge_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lb_edge_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_lb_edge_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lb_edge_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_lb_edge_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_lb_edge_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_authz_extensions_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + call.return_value = dep.ListAuthzExtensionsResponse() + client.list_authz_extensions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListAuthzExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_authz_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + call.return_value = dep.AuthzExtension() + client.get_authz_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetAuthzExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_authz_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_authz_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateAuthzExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_authz_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_authz_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateAuthzExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_authz_extension_empty_call_grpc(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_authz_extension), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_authz_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteAuthzExtensionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DepServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_lb_traffic_extensions_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lb_traffic_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.ListLbTrafficExtensionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_lb_traffic_extensions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListLbTrafficExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_lb_traffic_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lb_traffic_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.LbTrafficExtension( + name="name_value", + description="description_value", + forwarding_rules=["forwarding_rules_value"], + load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, + ) + ) + await client.get_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_lb_traffic_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_lb_traffic_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_lb_traffic_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lb_traffic_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_lb_traffic_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lb_traffic_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_lb_traffic_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteLbTrafficExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_lb_route_extensions_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lb_route_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.ListLbRouteExtensionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_lb_route_extensions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListLbRouteExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_lb_route_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lb_route_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.LbRouteExtension( + name="name_value", + description="description_value", + forwarding_rules=["forwarding_rules_value"], + load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, + ) + ) + await client.get_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_lb_route_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lb_route_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_lb_route_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lb_route_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_lb_route_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lb_route_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_lb_route_extension(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteLbRouteExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_lb_edge_extensions_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lb_edge_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.ListLbEdgeExtensionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_lb_edge_extensions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListLbEdgeExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_lb_edge_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lb_edge_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.LbEdgeExtension( + name="name_value", + description="description_value", + forwarding_rules=["forwarding_rules_value"], + load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, + ) + ) + await client.get_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_lb_edge_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lb_edge_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_lb_edge_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_lb_edge_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_lb_edge_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lb_edge_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_authz_extensions_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_authz_extensions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.ListAuthzExtensionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_authz_extensions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListAuthzExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_authz_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dep.AuthzExtension( + name="name_value", + description="description_value", + load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, + authority="authority_value", + service="service_value", + fail_open=True, + forward_headers=["forward_headers_value"], + wire_format=dep.WireFormat.EXT_PROC_GRPC, + ) + ) + await client.get_authz_extension(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetAuthzExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_authz_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_authz_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateAuthzExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_authz_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_authz_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateAuthzExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_authz_extension_empty_call_grpc_asyncio(): + client = DepServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_authz_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + type(client.transport.delete_authz_extension), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_authz_extension(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteAuthzExtensionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DepServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_lb_traffic_extensions_rest_bad_request( + request_type=dep.ListLbTrafficExtensionsRequest, +): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_lb_traffic_extensions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dep.ListLbTrafficExtensionsRequest, + dict, + ], +) +def test_list_lb_traffic_extensions_rest_call_success(request_type): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dep.ListLbTrafficExtensionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.ListLbTrafficExtensionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_lb_traffic_extensions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListLbTrafficExtensionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_lb_traffic_extensions_rest_interceptors(null_interceptor): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DepServiceRestInterceptor(), + ) + client = DepServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DepServiceRestInterceptor, "post_list_lb_traffic_extensions" + ) as post, mock.patch.object( + transports.DepServiceRestInterceptor, + "post_list_lb_traffic_extensions_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DepServiceRestInterceptor, "pre_list_lb_traffic_extensions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dep.ListLbTrafficExtensionsRequest.pb( + dep.ListLbTrafficExtensionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dep.ListLbTrafficExtensionsResponse.to_json( + dep.ListLbTrafficExtensionsResponse() + ) + req.return_value.content = return_value + + request = dep.ListLbTrafficExtensionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dep.ListLbTrafficExtensionsResponse() + post_with_metadata.return_value = ( + dep.ListLbTrafficExtensionsResponse(), + metadata, + ) + + client.list_lb_traffic_extensions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_lb_traffic_extension_rest_bad_request( + request_type=dep.GetLbTrafficExtensionRequest, +): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_lb_traffic_extension(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dep.GetLbTrafficExtensionRequest, + dict, + ], +) +def test_get_lb_traffic_extension_rest_call_success(request_type): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = dep.LbTrafficExtension( + name="name_value", + description="description_value", + forwarding_rules=["forwarding_rules_value"], + load_balancing_scheme=dep.LoadBalancingScheme.INTERNAL_MANAGED, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = dep.LbTrafficExtension.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_lb_traffic_extension(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dep.LbTrafficExtension) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.forwarding_rules == ["forwarding_rules_value"] + assert response.load_balancing_scheme == dep.LoadBalancingScheme.INTERNAL_MANAGED + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_lb_traffic_extension_rest_interceptors(null_interceptor): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DepServiceRestInterceptor(), + ) + client = DepServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DepServiceRestInterceptor, "post_get_lb_traffic_extension" + ) as post, mock.patch.object( + transports.DepServiceRestInterceptor, + "post_get_lb_traffic_extension_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DepServiceRestInterceptor, "pre_get_lb_traffic_extension" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dep.GetLbTrafficExtensionRequest.pb( + dep.GetLbTrafficExtensionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = dep.LbTrafficExtension.to_json(dep.LbTrafficExtension()) + req.return_value.content = return_value + + request = dep.GetLbTrafficExtensionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = dep.LbTrafficExtension() + post_with_metadata.return_value = dep.LbTrafficExtension(), metadata + + client.get_lb_traffic_extension( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_lb_traffic_extension_rest_bad_request( + request_type=dep.CreateLbTrafficExtensionRequest, +): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_lb_traffic_extension(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dep.CreateLbTrafficExtensionRequest, + dict, + ], +) +def test_create_lb_traffic_extension_rest_call_success(request_type): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["lb_traffic_extension"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "labels": {}, + "forwarding_rules": ["forwarding_rules_value1", "forwarding_rules_value2"], + "extension_chains": [ + { + "name": "name_value", + "match_condition": {"cel_expression": "cel_expression_value"}, + "extensions": [ + { + "name": "name_value", + "authority": "authority_value", + "service": "service_value", + "supported_events": [1], + "timeout": {"seconds": 751, "nanos": 543}, + "fail_open": True, + "forward_headers": [ + "forward_headers_value1", + "forward_headers_value2", + ], + "metadata": {"fields": {}}, + } + ], + } + ], + "load_balancing_scheme": 1, + "metadata": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = dep.CreateLbTrafficExtensionRequest.meta.fields["lb_traffic_extension"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
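+ # A proto-plus message type exposes its fields via `.meta.fields`, whereas a raw
+ # protobuf message class exposes them via `.DESCRIPTOR.fields`; the
+ # `hasattr(field.message, "DESCRIPTOR")` check below is what distinguishes the two.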
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "lb_traffic_extension" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["lb_traffic_extension"][field])): + del request_init["lb_traffic_extension"][field][i][subfield] + else: + del request_init["lb_traffic_extension"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_lb_traffic_extension(request) + + # Establish that the response is the type that we expect. 
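+ # create_lb_traffic_extension is a long-running operation, so this success test
+ # only verifies that the call completes against the mocked Operation payload;
+ # no field-level assertions on the response follow here.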
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_lb_traffic_extension_rest_interceptors(null_interceptor): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DepServiceRestInterceptor(), + ) + client = DepServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DepServiceRestInterceptor, "post_create_lb_traffic_extension" + ) as post, mock.patch.object( + transports.DepServiceRestInterceptor, + "post_create_lb_traffic_extension_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DepServiceRestInterceptor, "pre_create_lb_traffic_extension" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dep.CreateLbTrafficExtensionRequest.pb( + dep.CreateLbTrafficExtensionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = dep.CreateLbTrafficExtensionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_lb_traffic_extension( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_lb_traffic_extension_rest_bad_request( + request_type=dep.UpdateLbTrafficExtensionRequest, +): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "lb_traffic_extension": { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_lb_traffic_extension(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dep.UpdateLbTrafficExtensionRequest, + dict, + ], +) +def test_update_lb_traffic_extension_rest_call_success(request_type): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "lb_traffic_extension": { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } + } + request_init["lb_traffic_extension"] = { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "labels": {}, + "forwarding_rules": ["forwarding_rules_value1", "forwarding_rules_value2"], + "extension_chains": [ + { + "name": "name_value", + "match_condition": {"cel_expression": "cel_expression_value"}, + "extensions": [ + { + "name": "name_value", + "authority": "authority_value", + "service": "service_value", + "supported_events": [1], + "timeout": {"seconds": 751, "nanos": 543}, + "fail_open": True, + "forward_headers": [ + "forward_headers_value1", + "forward_headers_value2", + ], + "metadata": {"fields": {}}, + } + ], + } + ], + "load_balancing_scheme": 1, + "metadata": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = dep.UpdateLbTrafficExtensionRequest.meta.fields["lb_traffic_extension"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "lb_traffic_extension" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["lb_traffic_extension"][field])): + del request_init["lb_traffic_extension"][field][i][subfield] + else: + del request_init["lb_traffic_extension"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_lb_traffic_extension(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_lb_traffic_extension_rest_interceptors(null_interceptor): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DepServiceRestInterceptor(), + ) + client = DepServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DepServiceRestInterceptor, "post_update_lb_traffic_extension" + ) as post, mock.patch.object( + transports.DepServiceRestInterceptor, + "post_update_lb_traffic_extension_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DepServiceRestInterceptor, "pre_update_lb_traffic_extension" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dep.UpdateLbTrafficExtensionRequest.pb( + dep.UpdateLbTrafficExtensionRequest() ) - await client.update_authz_extension(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.UpdateAuthzExtensionRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value - assert args[0] == request_msg + request = dep.UpdateLbTrafficExtensionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + client.update_lb_traffic_extension( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_authz_extension_empty_call_grpc_asyncio(): - client = DepServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_lb_traffic_extension_rest_bad_request( + request_type=dep.DeleteLbTrafficExtensionRequest, +): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_authz_extension), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_lb_traffic_extension(request) + + +@pytest.mark.parametrize( + "request_type", + [ + dep.DeleteLbTrafficExtensionRequest, + dict, + ], +) +def test_delete_lb_traffic_extension_rest_call_success(request_type): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_lb_traffic_extension(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_lb_traffic_extension_rest_interceptors(null_interceptor): + transport = transports.DepServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DepServiceRestInterceptor(), + ) + client = DepServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.DepServiceRestInterceptor, "post_delete_lb_traffic_extension" + ) as post, mock.patch.object( + transports.DepServiceRestInterceptor, + "post_delete_lb_traffic_extension_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.DepServiceRestInterceptor, "pre_delete_lb_traffic_extension" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = dep.DeleteLbTrafficExtensionRequest.pb( + dep.DeleteLbTrafficExtensionRequest() ) - await client.delete_authz_extension(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = dep.DeleteAuthzExtensionRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value - assert args[0] == request_msg + request = dep.DeleteLbTrafficExtensionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + client.delete_lb_traffic_extension( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = DepServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_lb_traffic_extensions_rest_bad_request( - request_type=dep.ListLbTrafficExtensionsRequest, +def test_list_lb_route_extensions_rest_bad_request( + request_type=dep.ListLbRouteExtensionsRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11125,17 +15266,17 @@ def test_list_lb_traffic_extensions_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_lb_traffic_extensions(request) + client.list_lb_route_extensions(request) @pytest.mark.parametrize( "request_type", [ - dep.ListLbTrafficExtensionsRequest, + dep.ListLbRouteExtensionsRequest, dict, ], ) -def test_list_lb_traffic_extensions_rest_call_success(request_type): +def test_list_lb_route_extensions_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -11147,7 +15288,7 @@ def test_list_lb_traffic_extensions_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = dep.ListLbTrafficExtensionsResponse( + return_value = dep.ListLbRouteExtensionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -11157,21 +15298,21 @@ def test_list_lb_traffic_extensions_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = dep.ListLbTrafficExtensionsResponse.pb(return_value) + return_value = dep.ListLbRouteExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_lb_traffic_extensions(request) + response = client.list_lb_route_extensions(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLbTrafficExtensionsPager) + assert isinstance(response, pagers.ListLbRouteExtensionsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_lb_traffic_extensions_rest_interceptors(null_interceptor): +def test_list_lb_route_extensions_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11185,18 +15326,18 @@ def test_list_lb_traffic_extensions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DepServiceRestInterceptor, "post_list_lb_traffic_extensions" + transports.DepServiceRestInterceptor, "post_list_lb_route_extensions" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_list_lb_traffic_extensions_with_metadata", + "post_list_lb_route_extensions_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_list_lb_traffic_extensions" + transports.DepServiceRestInterceptor, "pre_list_lb_route_extensions" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.ListLbTrafficExtensionsRequest.pb( - dep.ListLbTrafficExtensionsRequest() + pb_message = dep.ListLbRouteExtensionsRequest.pb( + dep.ListLbRouteExtensionsRequest() ) transcode.return_value = { "method": "post", @@ -11208,24 +15349,21 @@ def test_list_lb_traffic_extensions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dep.ListLbTrafficExtensionsResponse.to_json( - dep.ListLbTrafficExtensionsResponse() + return_value = dep.ListLbRouteExtensionsResponse.to_json( + dep.ListLbRouteExtensionsResponse() ) req.return_value.content = return_value - request = dep.ListLbTrafficExtensionsRequest() + request = dep.ListLbRouteExtensionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = dep.ListLbTrafficExtensionsResponse() - post_with_metadata.return_value = ( - dep.ListLbTrafficExtensionsResponse(), - metadata, - ) + post.return_value = dep.ListLbRouteExtensionsResponse() + post_with_metadata.return_value = dep.ListLbRouteExtensionsResponse(), metadata - client.list_lb_traffic_extensions( + client.list_lb_route_extensions( request, metadata=[ ("key", "val"), @@ -11238,15 +15376,15 @@ def test_list_lb_traffic_extensions_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_lb_traffic_extension_rest_bad_request( - request_type=dep.GetLbTrafficExtensionRequest, +def test_get_lb_route_extension_rest_bad_request( + request_type=dep.GetLbRouteExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" } request = request_type(**request_init) @@ -11262,31 +15400,31 @@ def test_get_lb_traffic_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", 
"header-2": "value-2"} - client.get_lb_traffic_extension(request) + client.get_lb_route_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.GetLbTrafficExtensionRequest, + dep.GetLbRouteExtensionRequest, dict, ], ) -def test_get_lb_traffic_extension_rest_call_success(request_type): +def test_get_lb_route_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = dep.LbTrafficExtension( + return_value = dep.LbRouteExtension( name="name_value", description="description_value", forwarding_rules=["forwarding_rules_value"], @@ -11298,15 +15436,15 @@ def test_get_lb_traffic_extension_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = dep.LbTrafficExtension.pb(return_value) + return_value = dep.LbRouteExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_lb_traffic_extension(request) + response = client.get_lb_route_extension(request) # Establish that the response is the type that we expect. - assert isinstance(response, dep.LbTrafficExtension) + assert isinstance(response, dep.LbRouteExtension) assert response.name == "name_value" assert response.description == "description_value" assert response.forwarding_rules == ["forwarding_rules_value"] @@ -11314,7 +15452,7 @@ def test_get_lb_traffic_extension_rest_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_lb_traffic_extension_rest_interceptors(null_interceptor): +def test_get_lb_route_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11328,19 +15466,17 @@ def test_get_lb_traffic_extension_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DepServiceRestInterceptor, "post_get_lb_traffic_extension" + transports.DepServiceRestInterceptor, "post_get_lb_route_extension" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_get_lb_traffic_extension_with_metadata", + "post_get_lb_route_extension_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_get_lb_traffic_extension" + transports.DepServiceRestInterceptor, "pre_get_lb_route_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.GetLbTrafficExtensionRequest.pb( - dep.GetLbTrafficExtensionRequest() - ) + pb_message = dep.GetLbRouteExtensionRequest.pb(dep.GetLbRouteExtensionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -11351,19 +15487,19 @@ def test_get_lb_traffic_extension_rest_interceptors(null_interceptor): 
req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dep.LbTrafficExtension.to_json(dep.LbTrafficExtension()) + return_value = dep.LbRouteExtension.to_json(dep.LbRouteExtension()) req.return_value.content = return_value - request = dep.GetLbTrafficExtensionRequest() + request = dep.GetLbRouteExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = dep.LbTrafficExtension() - post_with_metadata.return_value = dep.LbTrafficExtension(), metadata + post.return_value = dep.LbRouteExtension() + post_with_metadata.return_value = dep.LbRouteExtension(), metadata - client.get_lb_traffic_extension( + client.get_lb_route_extension( request, metadata=[ ("key", "val"), @@ -11376,8 +15512,8 @@ def test_get_lb_traffic_extension_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_lb_traffic_extension_rest_bad_request( - request_type=dep.CreateLbTrafficExtensionRequest, +def test_create_lb_route_extension_rest_bad_request( + request_type=dep.CreateLbRouteExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11398,24 +15534,24 @@ def test_create_lb_traffic_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_lb_traffic_extension(request) + client.create_lb_route_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.CreateLbTrafficExtensionRequest, + dep.CreateLbRouteExtensionRequest, dict, ], ) -def test_create_lb_traffic_extension_rest_call_success(request_type): +def test_create_lb_route_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["lb_traffic_extension"] = { + request_init["lb_route_extension"] = { "name": "name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -11451,7 +15587,7 @@ def test_create_lb_traffic_extension_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = dep.CreateLbTrafficExtensionRequest.meta.fields["lb_traffic_extension"] + test_field = dep.CreateLbRouteExtensionRequest.meta.fields["lb_route_extension"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -11479,9 +15615,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "lb_traffic_extension" - ].items(): # pragma: NO COVER + for field, value in request_init["lb_route_extension"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -11511,10 +15645,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["lb_traffic_extension"][field])): - del request_init["lb_traffic_extension"][field][i][subfield] + for i in range(0, 
len(request_init["lb_route_extension"][field])): + del request_init["lb_route_extension"][field][i][subfield] else: - del request_init["lb_traffic_extension"][field][subfield] + del request_init["lb_route_extension"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -11529,14 +15663,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_lb_traffic_extension(request) + response = client.create_lb_route_extension(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_lb_traffic_extension_rest_interceptors(null_interceptor): +def test_create_lb_route_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11552,18 +15686,18 @@ def test_create_lb_traffic_extension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DepServiceRestInterceptor, "post_create_lb_traffic_extension" + transports.DepServiceRestInterceptor, "post_create_lb_route_extension" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_create_lb_traffic_extension_with_metadata", + "post_create_lb_route_extension_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_create_lb_traffic_extension" + transports.DepServiceRestInterceptor, "pre_create_lb_route_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.CreateLbTrafficExtensionRequest.pb( - dep.CreateLbTrafficExtensionRequest() + pb_message = dep.CreateLbRouteExtensionRequest.pb( + dep.CreateLbRouteExtensionRequest() ) transcode.return_value = { "method": "post", @@ -11578,7 +15712,7 @@ def test_create_lb_traffic_extension_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = dep.CreateLbTrafficExtensionRequest() + request = dep.CreateLbRouteExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -11587,7 +15721,7 @@ def test_create_lb_traffic_extension_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_lb_traffic_extension( + client.create_lb_route_extension( request, metadata=[ ("key", "val"), @@ -11600,16 +15734,16 @@ def test_create_lb_traffic_extension_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_lb_traffic_extension_rest_bad_request( - request_type=dep.UpdateLbTrafficExtensionRequest, +def test_update_lb_route_extension_rest_bad_request( + request_type=dep.UpdateLbRouteExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "lb_traffic_extension": { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + "lb_route_extension": { + "name": 
"projects/sample1/locations/sample2/lbRouteExtensions/sample3" } } request = request_type(**request_init) @@ -11626,29 +15760,29 @@ def test_update_lb_traffic_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_lb_traffic_extension(request) + client.update_lb_route_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.UpdateLbTrafficExtensionRequest, + dep.UpdateLbRouteExtensionRequest, dict, ], ) -def test_update_lb_traffic_extension_rest_call_success(request_type): +def test_update_lb_route_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "lb_traffic_extension": { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + "lb_route_extension": { + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" } } - request_init["lb_traffic_extension"] = { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3", + request_init["lb_route_extension"] = { + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "description": "description_value", @@ -11683,7 +15817,7 @@ def test_update_lb_traffic_extension_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = dep.UpdateLbTrafficExtensionRequest.meta.fields["lb_traffic_extension"] + test_field = dep.UpdateLbRouteExtensionRequest.meta.fields["lb_route_extension"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -11711,9 +15845,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "lb_traffic_extension" - ].items(): # pragma: NO COVER + for field, value in request_init["lb_route_extension"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -11743,10 +15875,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["lb_traffic_extension"][field])): - del request_init["lb_traffic_extension"][field][i][subfield] + for i in range(0, len(request_init["lb_route_extension"][field])): + del request_init["lb_route_extension"][field][i][subfield] else: - del request_init["lb_traffic_extension"][field][subfield] + del request_init["lb_route_extension"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -11761,14 +15893,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_lb_traffic_extension(request) + response = client.update_lb_route_extension(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_lb_traffic_extension_rest_interceptors(null_interceptor): +def test_update_lb_route_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11784,18 +15916,18 @@ def test_update_lb_traffic_extension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DepServiceRestInterceptor, "post_update_lb_traffic_extension" + transports.DepServiceRestInterceptor, "post_update_lb_route_extension" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_update_lb_traffic_extension_with_metadata", + "post_update_lb_route_extension_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_update_lb_traffic_extension" + transports.DepServiceRestInterceptor, "pre_update_lb_route_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.UpdateLbTrafficExtensionRequest.pb( - dep.UpdateLbTrafficExtensionRequest() + pb_message = dep.UpdateLbRouteExtensionRequest.pb( + dep.UpdateLbRouteExtensionRequest() ) transcode.return_value = { "method": "post", @@ -11810,7 +15942,7 @@ def test_update_lb_traffic_extension_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = dep.UpdateLbTrafficExtensionRequest() + request = dep.UpdateLbRouteExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -11819,7 +15951,7 @@ def test_update_lb_traffic_extension_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_lb_traffic_extension( + client.update_lb_route_extension( request, metadata=[ ("key", "val"), @@ -11832,15 +15964,15 @@ def test_update_lb_traffic_extension_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_lb_traffic_extension_rest_bad_request( - request_type=dep.DeleteLbTrafficExtensionRequest, +def test_delete_lb_route_extension_rest_bad_request( + request_type=dep.DeleteLbRouteExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" } request = request_type(**request_init) @@ -11856,24 +15988,24 @@ def test_delete_lb_traffic_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_lb_traffic_extension(request) + client.delete_lb_route_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.DeleteLbTrafficExtensionRequest, + dep.DeleteLbRouteExtensionRequest, dict, ], ) -def test_delete_lb_traffic_extension_rest_call_success(request_type): +def test_delete_lb_route_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding 
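+ # "Transcoding" means mapping the request message onto the HTTP URI and body
+ # according to the service's google.api.http rules; the sample `name` below is
+ # shaped to match the lbRouteExtensions path template used by the REST mapping.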
request_init = { - "name": "projects/sample1/locations/sample2/lbTrafficExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" } request = request_type(**request_init) @@ -11889,14 +16021,14 @@ def test_delete_lb_traffic_extension_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_lb_traffic_extension(request) + response = client.delete_lb_route_extension(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_lb_traffic_extension_rest_interceptors(null_interceptor): +def test_delete_lb_route_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11912,18 +16044,18 @@ def test_delete_lb_traffic_extension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DepServiceRestInterceptor, "post_delete_lb_traffic_extension" + transports.DepServiceRestInterceptor, "post_delete_lb_route_extension" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_delete_lb_traffic_extension_with_metadata", + "post_delete_lb_route_extension_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_delete_lb_traffic_extension" + transports.DepServiceRestInterceptor, "pre_delete_lb_route_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.DeleteLbTrafficExtensionRequest.pb( - dep.DeleteLbTrafficExtensionRequest() + pb_message = dep.DeleteLbRouteExtensionRequest.pb( + dep.DeleteLbRouteExtensionRequest() ) transcode.return_value = { "method": "post", @@ -11938,7 +16070,7 @@ def test_delete_lb_traffic_extension_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = dep.DeleteLbTrafficExtensionRequest() + request = dep.DeleteLbRouteExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -11947,7 +16079,7 @@ def test_delete_lb_traffic_extension_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_lb_traffic_extension( + client.delete_lb_route_extension( request, metadata=[ ("key", "val"), @@ -11960,8 +16092,8 @@ def test_delete_lb_traffic_extension_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_lb_route_extensions_rest_bad_request( - request_type=dep.ListLbRouteExtensionsRequest, +def test_list_lb_edge_extensions_rest_bad_request( + request_type=dep.ListLbEdgeExtensionsRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11982,17 +16114,17 @@ def test_list_lb_route_extensions_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_lb_route_extensions(request) + client.list_lb_edge_extensions(request) @pytest.mark.parametrize( "request_type", [ - 
dep.ListLbRouteExtensionsRequest, + dep.ListLbEdgeExtensionsRequest, dict, ], ) -def test_list_lb_route_extensions_rest_call_success(request_type): +def test_list_lb_edge_extensions_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -12004,7 +16136,7 @@ def test_list_lb_route_extensions_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = dep.ListLbRouteExtensionsResponse( + return_value = dep.ListLbEdgeExtensionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -12014,21 +16146,21 @@ def test_list_lb_route_extensions_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = dep.ListLbRouteExtensionsResponse.pb(return_value) + return_value = dep.ListLbEdgeExtensionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_lb_route_extensions(request) + response = client.list_lb_edge_extensions(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLbRouteExtensionsPager) + assert isinstance(response, pagers.ListLbEdgeExtensionsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_lb_route_extensions_rest_interceptors(null_interceptor): +def test_list_lb_edge_extensions_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12042,18 +16174,18 @@ def test_list_lb_route_extensions_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DepServiceRestInterceptor, "post_list_lb_route_extensions" + transports.DepServiceRestInterceptor, "post_list_lb_edge_extensions" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_list_lb_route_extensions_with_metadata", + "post_list_lb_edge_extensions_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_list_lb_route_extensions" + transports.DepServiceRestInterceptor, "pre_list_lb_edge_extensions" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.ListLbRouteExtensionsRequest.pb( - dep.ListLbRouteExtensionsRequest() + pb_message = dep.ListLbEdgeExtensionsRequest.pb( + dep.ListLbEdgeExtensionsRequest() ) transcode.return_value = { "method": "post", @@ -12065,21 +16197,21 @@ def test_list_lb_route_extensions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dep.ListLbRouteExtensionsResponse.to_json( - dep.ListLbRouteExtensionsResponse() + return_value = dep.ListLbEdgeExtensionsResponse.to_json( + dep.ListLbEdgeExtensionsResponse() ) req.return_value.content = return_value - request = 
dep.ListLbRouteExtensionsRequest() + request = dep.ListLbEdgeExtensionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = dep.ListLbRouteExtensionsResponse() - post_with_metadata.return_value = dep.ListLbRouteExtensionsResponse(), metadata + post.return_value = dep.ListLbEdgeExtensionsResponse() + post_with_metadata.return_value = dep.ListLbEdgeExtensionsResponse(), metadata - client.list_lb_route_extensions( + client.list_lb_edge_extensions( request, metadata=[ ("key", "val"), @@ -12092,15 +16224,15 @@ def test_list_lb_route_extensions_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_lb_route_extension_rest_bad_request( - request_type=dep.GetLbRouteExtensionRequest, +def test_get_lb_edge_extension_rest_bad_request( + request_type=dep.GetLbEdgeExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" } request = request_type(**request_init) @@ -12116,31 +16248,31 @@ def test_get_lb_route_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_lb_route_extension(request) + client.get_lb_edge_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.GetLbRouteExtensionRequest, + dep.GetLbEdgeExtensionRequest, dict, ], ) -def test_get_lb_route_extension_rest_call_success(request_type): +def test_get_lb_edge_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = dep.LbRouteExtension( + return_value = dep.LbEdgeExtension( name="name_value", description="description_value", forwarding_rules=["forwarding_rules_value"], @@ -12152,15 +16284,15 @@ def test_get_lb_route_extension_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = dep.LbRouteExtension.pb(return_value) + return_value = dep.LbEdgeExtension.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_lb_route_extension(request) + response = client.get_lb_edge_extension(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, dep.LbRouteExtension) + assert isinstance(response, dep.LbEdgeExtension) assert response.name == "name_value" assert response.description == "description_value" assert response.forwarding_rules == ["forwarding_rules_value"] @@ -12168,7 +16300,7 @@ def test_get_lb_route_extension_rest_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_lb_route_extension_rest_interceptors(null_interceptor): +def test_get_lb_edge_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12182,17 +16314,16 @@ def test_get_lb_route_extension_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.DepServiceRestInterceptor, "post_get_lb_route_extension" + transports.DepServiceRestInterceptor, "post_get_lb_edge_extension" ) as post, mock.patch.object( - transports.DepServiceRestInterceptor, - "post_get_lb_route_extension_with_metadata", + transports.DepServiceRestInterceptor, "post_get_lb_edge_extension_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_get_lb_route_extension" + transports.DepServiceRestInterceptor, "pre_get_lb_edge_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.GetLbRouteExtensionRequest.pb(dep.GetLbRouteExtensionRequest()) + pb_message = dep.GetLbEdgeExtensionRequest.pb(dep.GetLbEdgeExtensionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -12203,19 +16334,19 @@ def test_get_lb_route_extension_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = dep.LbRouteExtension.to_json(dep.LbRouteExtension()) + return_value = dep.LbEdgeExtension.to_json(dep.LbEdgeExtension()) req.return_value.content = return_value - request = dep.GetLbRouteExtensionRequest() + request = dep.GetLbEdgeExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = dep.LbRouteExtension() - post_with_metadata.return_value = dep.LbRouteExtension(), metadata + post.return_value = dep.LbEdgeExtension() + post_with_metadata.return_value = dep.LbEdgeExtension(), metadata - client.get_lb_route_extension( + client.get_lb_edge_extension( request, metadata=[ ("key", "val"), @@ -12228,8 +16359,8 @@ def test_get_lb_route_extension_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_lb_route_extension_rest_bad_request( - request_type=dep.CreateLbRouteExtensionRequest, +def test_create_lb_edge_extension_rest_bad_request( + request_type=dep.CreateLbEdgeExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12250,24 +16381,24 @@ def test_create_lb_route_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_lb_route_extension(request) + client.create_lb_edge_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.CreateLbRouteExtensionRequest, + dep.CreateLbEdgeExtensionRequest, dict, ], ) -def test_create_lb_route_extension_rest_call_success(request_type): 
+def test_create_lb_edge_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["lb_route_extension"] = { + request_init["lb_edge_extension"] = { "name": "name_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -12296,14 +16427,13 @@ def test_create_lb_route_extension_rest_call_success(request_type): } ], "load_balancing_scheme": 1, - "metadata": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = dep.CreateLbRouteExtensionRequest.meta.fields["lb_route_extension"] + test_field = dep.CreateLbEdgeExtensionRequest.meta.fields["lb_edge_extension"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12331,7 +16461,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["lb_route_extension"].items(): # pragma: NO COVER + for field, value in request_init["lb_edge_extension"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12361,10 +16491,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["lb_route_extension"][field])): - del request_init["lb_route_extension"][field][i][subfield] + for i in range(0, len(request_init["lb_edge_extension"][field])): + del request_init["lb_edge_extension"][field][i][subfield] else: - del request_init["lb_route_extension"][field][subfield] + del request_init["lb_edge_extension"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12379,14 +16509,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_lb_route_extension(request) + response = client.create_lb_edge_extension(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_lb_route_extension_rest_interceptors(null_interceptor): +def test_create_lb_edge_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12402,18 +16532,18 @@ def test_create_lb_route_extension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DepServiceRestInterceptor, "post_create_lb_route_extension" + transports.DepServiceRestInterceptor, "post_create_lb_edge_extension" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_create_lb_route_extension_with_metadata", + "post_create_lb_edge_extension_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_create_lb_route_extension" + transports.DepServiceRestInterceptor, "pre_create_lb_edge_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.CreateLbRouteExtensionRequest.pb( - dep.CreateLbRouteExtensionRequest() + pb_message = dep.CreateLbEdgeExtensionRequest.pb( + dep.CreateLbEdgeExtensionRequest() ) transcode.return_value = { "method": "post", @@ -12428,7 +16558,7 @@ def test_create_lb_route_extension_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = dep.CreateLbRouteExtensionRequest() + request = dep.CreateLbEdgeExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -12437,7 +16567,7 @@ def test_create_lb_route_extension_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_lb_route_extension( + client.create_lb_edge_extension( request, metadata=[ ("key", "val"), @@ -12450,16 +16580,16 @@ def test_create_lb_route_extension_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_lb_route_extension_rest_bad_request( - request_type=dep.UpdateLbRouteExtensionRequest, +def test_update_lb_edge_extension_rest_bad_request( + request_type=dep.UpdateLbEdgeExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "lb_route_extension": { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + "lb_edge_extension": { + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" } } request = request_type(**request_init) @@ -12476,29 +16606,29 @@ def test_update_lb_route_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_lb_route_extension(request) + client.update_lb_edge_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.UpdateLbRouteExtensionRequest, + dep.UpdateLbEdgeExtensionRequest, dict, ], ) -def test_update_lb_route_extension_rest_call_success(request_type): +def test_update_lb_edge_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding 
request_init = { - "lb_route_extension": { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + "lb_edge_extension": { + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" } } - request_init["lb_route_extension"] = { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3", + request_init["lb_edge_extension"] = { + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, "description": "description_value", @@ -12526,14 +16656,13 @@ def test_update_lb_route_extension_rest_call_success(request_type): } ], "load_balancing_scheme": 1, - "metadata": {}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = dep.UpdateLbRouteExtensionRequest.meta.fields["lb_route_extension"] + test_field = dep.UpdateLbEdgeExtensionRequest.meta.fields["lb_edge_extension"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -12561,7 +16690,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["lb_route_extension"].items(): # pragma: NO COVER + for field, value in request_init["lb_edge_extension"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -12591,10 +16720,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["lb_route_extension"][field])): - del request_init["lb_route_extension"][field][i][subfield] + for i in range(0, len(request_init["lb_edge_extension"][field])): + del request_init["lb_edge_extension"][field][i][subfield] else: - del request_init["lb_route_extension"][field][subfield] + del request_init["lb_edge_extension"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -12609,14 +16738,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_lb_route_extension(request) + response = client.update_lb_edge_extension(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_lb_route_extension_rest_interceptors(null_interceptor): +def test_update_lb_edge_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12632,18 +16761,18 @@ def test_update_lb_route_extension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DepServiceRestInterceptor, "post_update_lb_route_extension" + transports.DepServiceRestInterceptor, "post_update_lb_edge_extension" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_update_lb_route_extension_with_metadata", + "post_update_lb_edge_extension_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_update_lb_route_extension" + transports.DepServiceRestInterceptor, "pre_update_lb_edge_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.UpdateLbRouteExtensionRequest.pb( - dep.UpdateLbRouteExtensionRequest() + pb_message = dep.UpdateLbEdgeExtensionRequest.pb( + dep.UpdateLbEdgeExtensionRequest() ) transcode.return_value = { "method": "post", @@ -12658,7 +16787,7 @@ def test_update_lb_route_extension_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = dep.UpdateLbRouteExtensionRequest() + request = dep.UpdateLbEdgeExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -12667,7 +16796,7 @@ def test_update_lb_route_extension_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_lb_route_extension( + client.update_lb_edge_extension( request, metadata=[ ("key", "val"), @@ -12680,15 +16809,15 @@ def test_update_lb_route_extension_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_lb_route_extension_rest_bad_request( - request_type=dep.DeleteLbRouteExtensionRequest, +def test_delete_lb_edge_extension_rest_bad_request( + request_type=dep.DeleteLbEdgeExtensionRequest, ): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/lbRouteExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" } request = request_type(**request_init) @@ -12704,24 +16833,24 @@ def test_delete_lb_route_extension_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_lb_route_extension(request) + client.delete_lb_edge_extension(request) @pytest.mark.parametrize( "request_type", [ - dep.DeleteLbRouteExtensionRequest, + dep.DeleteLbEdgeExtensionRequest, dict, ], ) -def test_delete_lb_route_extension_rest_call_success(request_type): +def test_delete_lb_edge_extension_rest_call_success(request_type): client = DepServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": 
"projects/sample1/locations/sample2/lbRouteExtensions/sample3" + "name": "projects/sample1/locations/sample2/lbEdgeExtensions/sample3" } request = request_type(**request_init) @@ -12737,14 +16866,14 @@ def test_delete_lb_route_extension_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_lb_route_extension(request) + response = client.delete_lb_edge_extension(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_lb_route_extension_rest_interceptors(null_interceptor): +def test_delete_lb_edge_extension_rest_interceptors(null_interceptor): transport = transports.DepServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12760,18 +16889,18 @@ def test_delete_lb_route_extension_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.DepServiceRestInterceptor, "post_delete_lb_route_extension" + transports.DepServiceRestInterceptor, "post_delete_lb_edge_extension" ) as post, mock.patch.object( transports.DepServiceRestInterceptor, - "post_delete_lb_route_extension_with_metadata", + "post_delete_lb_edge_extension_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.DepServiceRestInterceptor, "pre_delete_lb_route_extension" + transports.DepServiceRestInterceptor, "pre_delete_lb_edge_extension" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = dep.DeleteLbRouteExtensionRequest.pb( - dep.DeleteLbRouteExtensionRequest() + pb_message = dep.DeleteLbEdgeExtensionRequest.pb( + dep.DeleteLbEdgeExtensionRequest() ) transcode.return_value = { "method": "post", @@ -12786,7 +16915,7 @@ def test_delete_lb_route_extension_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = dep.DeleteLbRouteExtensionRequest() + request = dep.DeleteLbEdgeExtensionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -12795,7 +16924,7 @@ def test_delete_lb_route_extension_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_lb_route_extension( + client.delete_lb_edge_extension( request, metadata=[ ("key", "val"), @@ -14418,6 +18547,116 @@ def test_delete_lb_route_extension_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_lb_edge_extensions_empty_call_rest(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_lb_edge_extensions), "__call__" + ) as call: + client.list_lb_edge_extensions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.ListLbEdgeExtensionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_lb_edge_extension_empty_call_rest(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_lb_edge_extension), "__call__" + ) as call: + client.get_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.GetLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_lb_edge_extension_empty_call_rest(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_lb_edge_extension), "__call__" + ) as call: + client.create_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.CreateLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_lb_edge_extension_empty_call_rest(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_lb_edge_extension), "__call__" + ) as call: + client.update_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.UpdateLbEdgeExtensionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_lb_edge_extension_empty_call_rest(): + client = DepServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_lb_edge_extension), "__call__" + ) as call: + client.delete_lb_edge_extension(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = dep.DeleteLbEdgeExtensionRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_list_authz_extensions_empty_call_rest(): @@ -14588,6 +18827,11 @@ def test_dep_service_base_transport(): "create_lb_route_extension", "update_lb_route_extension", "delete_lb_route_extension", + "list_lb_edge_extensions", + "get_lb_edge_extension", + "create_lb_edge_extension", + "update_lb_edge_extension", + "delete_lb_edge_extension", "list_authz_extensions", "get_authz_extension", "create_authz_extension", @@ -14892,6 +19136,21 @@ def test_dep_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_lb_route_extension._session session2 = client2.transport.delete_lb_route_extension._session assert session1 != session2 + session1 = client1.transport.list_lb_edge_extensions._session + session2 = client2.transport.list_lb_edge_extensions._session + assert session1 != session2 + session1 = client1.transport.get_lb_edge_extension._session + session2 = client2.transport.get_lb_edge_extension._session + assert session1 != session2 + session1 = client1.transport.create_lb_edge_extension._session + session2 = client2.transport.create_lb_edge_extension._session + assert session1 != session2 + session1 = client1.transport.update_lb_edge_extension._session + session2 = client2.transport.update_lb_edge_extension._session + assert session1 != session2 + session1 = client1.transport.delete_lb_edge_extension._session + session2 = client2.transport.delete_lb_edge_extension._session + assert session1 != session2 session1 = client1.transport.list_authz_extensions._session session2 = client2.transport.list_authz_extensions._session assert session1 != session2 @@ -15087,10 +19346,38 @@ def test_parse_authz_extension_path(): assert expected == actual -def test_lb_route_extension_path(): +def test_lb_edge_extension_path(): project = "cuttlefish" location = "mussel" - lb_route_extension = "winkle" + lb_edge_extension = "winkle" + expected = "projects/{project}/locations/{location}/lbEdgeExtensions/{lb_edge_extension}".format( + project=project, + location=location, + lb_edge_extension=lb_edge_extension, + ) + actual = DepServiceClient.lb_edge_extension_path( + project, location, lb_edge_extension + ) + assert expected == actual + + +def test_parse_lb_edge_extension_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "lb_edge_extension": "abalone", + } + path = DepServiceClient.lb_edge_extension_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DepServiceClient.parse_lb_edge_extension_path(path) + assert expected == actual + + +def test_lb_route_extension_path(): + project = "squid" + location = "clam" + lb_route_extension = "whelk" expected = "projects/{project}/locations/{location}/lbRouteExtensions/{lb_route_extension}".format( project=project, location=location, @@ -15104,9 +19391,9 @@ def test_lb_route_extension_path(): def test_parse_lb_route_extension_path(): expected = { - "project": "nautilus", - "location": "scallop", - "lb_route_extension": "abalone", + "project": "octopus", + "location": "oyster", + "lb_route_extension": "nudibranch", } path = DepServiceClient.lb_route_extension_path(**expected) @@ -15116,9 +19403,9 @@ def test_parse_lb_route_extension_path(): def test_lb_traffic_extension_path(): - project = "squid" - location = "clam" - lb_traffic_extension = "whelk" + project = "cuttlefish" + location = "mussel" + lb_traffic_extension = "winkle" expected = "projects/{project}/locations/{location}/lbTrafficExtensions/{lb_traffic_extension}".format( project=project, location=location, @@ -15132,9 +19419,9 @@ def test_lb_traffic_extension_path(): def test_parse_lb_traffic_extension_path(): expected = { - "project": "octopus", - "location": "oyster", - "lb_traffic_extension": "nudibranch", + "project": "nautilus", + "location": "scallop", + "lb_traffic_extension": "abalone", } path = DepServiceClient.lb_traffic_extension_path(**expected) @@ -15144,7 +19431,7 @@ def test_parse_lb_traffic_extension_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -15154,7 +19441,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = DepServiceClient.common_billing_account_path(**expected) @@ -15164,7 +19451,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -15174,7 +19461,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = DepServiceClient.common_folder_path(**expected) @@ -15184,7 +19471,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -15194,7 +19481,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "nudibranch", } path = DepServiceClient.common_organization_path(**expected) @@ -15204,7 +19491,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -15214,7 +19501,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = DepServiceClient.common_project_path(**expected) @@ -15224,8 +19511,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -15236,8 +19523,8 @@ def 
test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = DepServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/__init__.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/__init__.py index b03259319214..c198676d3d45 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/__init__.py @@ -52,6 +52,9 @@ ListGlossariesRequest, ListGlossariesResponse, OutputConfig, + RefinementEntry, + RefineTextRequest, + RefineTextResponse, SupportedLanguage, SupportedLanguages, TranslateDocumentRequest, @@ -93,6 +96,9 @@ "ListGlossariesRequest", "ListGlossariesResponse", "OutputConfig", + "RefineTextRequest", + "RefineTextResponse", + "RefinementEntry", "SupportedLanguage", "SupportedLanguages", "TranslateDocumentRequest", diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_metadata.json b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_metadata.json index 8ad815f314a2..e9117af004a5 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_metadata.json +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/gapic_metadata.json @@ -50,6 +50,11 @@ "list_glossaries" ] }, + "RefineText": { + "methods": [ + "refine_text" + ] + }, "TranslateDocument": { "methods": [ "translate_document" @@ -105,6 +110,11 @@ "list_glossaries" ] }, + "RefineText": { + "methods": [ + "refine_text" + ] + }, "TranslateDocument": { "methods": [ "translate_document" @@ -160,6 +170,11 @@ "list_glossaries" ] }, + "RefineText": { + "methods": [ + "refine_text" + ] + }, "TranslateDocument": { "methods": [ "translate_document" diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py index 7a4ad2f4a474..74b7c4997189 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/async_client.py @@ -1676,6 +1676,97 @@ async def sample_delete_glossary(): # Done; return the response. return response + async def refine_text( + self, + request: Optional[Union[translation_service.RefineTextRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> translation_service.RefineTextResponse: + r"""Refines the input translated text to improve the + quality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3beta1 + + async def sample_refine_text(): + # Create a client + client = translate_v3beta1.TranslationServiceAsyncClient() + + # Initialize request argument(s) + refinement_entries = translate_v3beta1.RefinementEntry() + refinement_entries.source_text = "source_text_value" + refinement_entries.original_translation = "original_translation_value" + + request = translate_v3beta1.RefineTextRequest( + parent="parent_value", + refinement_entries=refinement_entries, + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + ) + + # Make the request + response = await client.refine_text(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.translate_v3beta1.types.RefineTextRequest, dict]]): + The request object. Request message for RefineText. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.translate_v3beta1.types.RefineTextResponse: + Response message for RefineText. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, translation_service.RefineTextRequest): + request = translation_service.RefineTextRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.refine_text + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py index 47b66204bdce..679aa428fa64 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/client.py @@ -2073,6 +2073,95 @@ def sample_delete_glossary(): # Done; return the response. 
return response + def refine_text( + self, + request: Optional[Union[translation_service.RefineTextRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> translation_service.RefineTextResponse: + r"""Refines the input translated text to improve the + quality. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import translate_v3beta1 + + def sample_refine_text(): + # Create a client + client = translate_v3beta1.TranslationServiceClient() + + # Initialize request argument(s) + refinement_entries = translate_v3beta1.RefinementEntry() + refinement_entries.source_text = "source_text_value" + refinement_entries.original_translation = "original_translation_value" + + request = translate_v3beta1.RefineTextRequest( + parent="parent_value", + refinement_entries=refinement_entries, + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + ) + + # Make the request + response = client.refine_text(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.translate_v3beta1.types.RefineTextRequest, dict]): + The request object. Request message for RefineText. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.translate_v3beta1.types.RefineTextResponse: + Response message for RefineText. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, translation_service.RefineTextRequest): + request = translation_service.RefineTextRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.refine_text] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "TranslationServiceClient": return self diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py index c8d8f99bf518..1253ee9844ad 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/base.py @@ -230,6 +230,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.refine_text: gapic_v1.method.wrap_method( + self.refine_text, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -386,6 +391,18 @@ def delete_glossary( ]: raise NotImplementedError() + @property + def refine_text( + self, + ) -> Callable[ + [translation_service.RefineTextRequest], + Union[ + translation_service.RefineTextResponse, + Awaitable[translation_service.RefineTextResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py index b02ae2bf152d..d6e335f0d4b7 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc.py @@ -651,6 +651,35 @@ def delete_glossary( ) return self._stubs["delete_glossary"] + @property + def refine_text( + self, + ) -> Callable[ + [translation_service.RefineTextRequest], translation_service.RefineTextResponse + ]: + r"""Return a callable for the refine text method over gRPC. + + Refines the input translated text to improve the + quality. + + Returns: + Callable[[~.RefineTextRequest], + ~.RefineTextResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "refine_text" not in self._stubs: + self._stubs["refine_text"] = self._logged_channel.unary_unary( + "/google.cloud.translation.v3beta1.TranslationService/RefineText", + request_serializer=translation_service.RefineTextRequest.serialize, + response_deserializer=translation_service.RefineTextResponse.deserialize, + ) + return self._stubs["refine_text"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py index 9d13d462f771..05c6c98aac0a 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/grpc_asyncio.py @@ -662,6 +662,36 @@ def delete_glossary( ) return self._stubs["delete_glossary"] + @property + def refine_text( + self, + ) -> Callable[ + [translation_service.RefineTextRequest], + Awaitable[translation_service.RefineTextResponse], + ]: + r"""Return a callable for the refine text method over gRPC. + + Refines the input translated text to improve the + quality. + + Returns: + Callable[[~.RefineTextRequest], + Awaitable[~.RefineTextResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "refine_text" not in self._stubs: + self._stubs["refine_text"] = self._logged_channel.unary_unary( + "/google.cloud.translation.v3beta1.TranslationService/RefineText", + request_serializer=translation_service.RefineTextRequest.serialize, + response_deserializer=translation_service.RefineTextResponse.deserialize, + ) + return self._stubs["refine_text"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -755,6 +785,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.refine_text: self._wrap_method( + self.refine_text, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py index 1743ed18ec21..a1d337ae88bb 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest.py @@ -140,6 +140,14 @@ def post_list_glossaries(self, response): logging.log(f"Received response: {response}") return response + def pre_refine_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_refine_text(self, response): + logging.log(f"Received response: {response}") + return response + def pre_translate_document(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -561,6 +569,56 @@ def 
post_list_glossaries_with_metadata( """ return response, metadata + def pre_refine_text( + self, + request: translation_service.RefineTextRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + translation_service.RefineTextRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for refine_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the TranslationService server. + """ + return request, metadata + + def post_refine_text( + self, response: translation_service.RefineTextResponse + ) -> translation_service.RefineTextResponse: + """Post-rpc interceptor for refine_text + + DEPRECATED. Please use the `post_refine_text_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the TranslationService server but before + it is returned to user code. This `post_refine_text` interceptor runs + before the `post_refine_text_with_metadata` interceptor. + """ + return response + + def post_refine_text_with_metadata( + self, + response: translation_service.RefineTextResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + translation_service.RefineTextResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for refine_text + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the TranslationService server but before it is returned to user code. + + We recommend only using this `post_refine_text_with_metadata` + interceptor in new development instead of the `post_refine_text` interceptor. + When both interceptors are used, this `post_refine_text_with_metadata` interceptor runs after the + `post_refine_text` interceptor. The (possibly modified) response returned by + `post_refine_text` will be passed to + `post_refine_text_with_metadata`. + """ + return response, metadata + def pre_translate_document( self, request: translation_service.TranslateDocumentRequest, @@ -2199,6 +2257,158 @@ def __call__( ) return resp + class _RefineText( + _BaseTranslationServiceRestTransport._BaseRefineText, TranslationServiceRestStub + ): + def __hash__(self): + return hash("TranslationServiceRestTransport.RefineText") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: translation_service.RefineTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> translation_service.RefineTextResponse: + r"""Call the refine text method over HTTP. + + Args: + request (~.translation_service.RefineTextRequest): + The request object. Request message for RefineText. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.translation_service.RefineTextResponse: + Response message for RefineText. + """ + + http_options = ( + _BaseTranslationServiceRestTransport._BaseRefineText._get_http_options() + ) + + request, metadata = self._interceptor.pre_refine_text(request, metadata) + transcoded_request = _BaseTranslationServiceRestTransport._BaseRefineText._get_transcoded_request( + http_options, request + ) + + body = _BaseTranslationServiceRestTransport._BaseRefineText._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseTranslationServiceRestTransport._BaseRefineText._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.translation_v3beta1.TranslationServiceClient.RefineText", + extra={ + "serviceName": "google.cloud.translation.v3beta1.TranslationService", + "rpcName": "RefineText", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = TranslationServiceRestTransport._RefineText._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = translation_service.RefineTextResponse() + pb_resp = translation_service.RefineTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_refine_text(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_refine_text_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = translation_service.RefineTextResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.translation_v3beta1.TranslationServiceClient.refine_text", + extra={ + "serviceName": "google.cloud.translation.v3beta1.TranslationService", + "rpcName": "RefineText", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _TranslateDocument( _BaseTranslationServiceRestTransport._BaseTranslateDocument, TranslationServiceRestStub, @@ -2593,6 +2803,16 @@ def list_glossaries( # In C++ this would require a dynamic_cast return self._ListGlossaries(self._session, self._host, self._interceptor) # type: ignore + @property + def refine_text( + self, + ) -> Callable[ + [translation_service.RefineTextRequest], translation_service.RefineTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RefineText(self._session, self._host, self._interceptor) # type: ignore + @property def translate_document( self, diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest_base.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest_base.py index 5f3282ba7c75..9be73eaa1aab 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest_base.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/services/translation_service/transports/rest_base.py @@ -516,6 +516,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseRefineText: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3beta1/{parent=projects/*/locations/*}:refineText", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = translation_service.RefineTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], 
use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseTranslationServiceRestTransport._BaseRefineText._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseTranslateDocument: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/__init__.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/__init__.py index cb44d756bad7..1f4f59beb285 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/__init__.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/__init__.py @@ -43,6 +43,9 @@ ListGlossariesRequest, ListGlossariesResponse, OutputConfig, + RefinementEntry, + RefineTextRequest, + RefineTextResponse, SupportedLanguage, SupportedLanguages, TranslateDocumentRequest, @@ -83,6 +86,9 @@ "ListGlossariesRequest", "ListGlossariesResponse", "OutputConfig", + "RefinementEntry", + "RefineTextRequest", + "RefineTextResponse", "SupportedLanguage", "SupportedLanguages", "TranslateDocumentRequest", diff --git a/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py b/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py index 987adbf1903e..6b5b632cb9b9 100644 --- a/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py +++ b/packages/google-cloud-translate/google/cloud/translate_v3beta1/types/translation_service.py @@ -60,6 +60,9 @@ "BatchDocumentOutputConfig", "BatchTranslateDocumentResponse", "BatchTranslateDocumentMetadata", + "RefinementEntry", + "RefineTextRequest", + "RefineTextResponse", }, ) @@ -76,6 +79,9 @@ class TranslateTextGlossaryConfig(proto.Message): Optional. Indicates match is case-insensitive. Default value is false if missing. + contextual_translation_enabled (bool): + Optional. If set to true, the glossary will + be used for contextual translation. """ glossary: str = proto.Field( @@ -86,6 +92,10 @@ class TranslateTextGlossaryConfig(proto.Message): proto.BOOL, number=2, ) + contextual_translation_enabled: bool = proto.Field( + proto.BOOL, + number=4, + ) class TranslateTextRequest(proto.Message): @@ -1817,6 +1827,9 @@ class BatchTranslateDocumentRequest(proto.Message): enable_rotation_correction (bool): Optional. If true, enable auto rotation correction in DVS. + pdf_native_only (bool): + Optional. If true, only native pdf pages will + be translated. """ parent: str = proto.Field( @@ -1869,6 +1882,10 @@ class BatchTranslateDocumentRequest(proto.Message): proto.BOOL, number=12, ) + pdf_native_only: bool = proto.Field( + proto.BOOL, + number=13, + ) class BatchDocumentInputConfig(proto.Message): @@ -2184,4 +2201,85 @@ class State(proto.Enum): ) +class RefinementEntry(proto.Message): + r"""A single refinement entry for RefineTextRequest. + + Attributes: + source_text (str): + Required. The source text to be refined. + original_translation (str): + Required. The original translation of the + source text. 
+ """ + + source_text: str = proto.Field( + proto.STRING, + number=1, + ) + original_translation: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RefineTextRequest(proto.Message): + r"""Request message for RefineText. + + Attributes: + parent (str): + Required. Project or location to make a call. Must refer to + a caller's project. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + refinement_entries (MutableSequence[google.cloud.translate_v3beta1.types.RefinementEntry]): + Required. The source texts and original + translations in the source and target languages. + source_language_code (str): + Required. The BCP-47 language code of the + source text in the request, for example, + "en-US". + target_language_code (str): + Required. The BCP-47 language code for + translation output, for example, "zh-CN". + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + refinement_entries: MutableSequence["RefinementEntry"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="RefinementEntry", + ) + source_language_code: str = proto.Field( + proto.STRING, + number=4, + ) + target_language_code: str = proto.Field( + proto.STRING, + number=5, + ) + + +class RefineTextResponse(proto.Message): + r"""Response message for RefineText. + + Attributes: + refined_translations (MutableSequence[str]): + The refined translations obtained from the + original translations. + """ + + refined_translations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-translate/noxfile.py b/packages/google-cloud-translate/noxfile.py index 05ede8cfb88e..10b6077b8c7d 100644 --- a/packages/google-cloud-translate/noxfile.py +++ b/packages/google-cloud-translate/noxfile.py @@ -27,6 +27,10 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json index 8057586e2973..0dab3397bed5 100644 --- a/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json +++ b/packages/google-cloud-translate/samples/generated_samples/snippet_metadata_google.cloud.translation.v3beta1.json @@ -1371,6 +1371,159 @@ ], "title": "translate_v3beta1_generated_translation_service_list_glossaries_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.translate_v3beta1.TranslationServiceAsyncClient", + "shortName": "TranslationServiceAsyncClient" + }, + "fullName": "google.cloud.translate_v3beta1.TranslationServiceAsyncClient.refine_text", + "method": { + "fullName": "google.cloud.translation.v3beta1.TranslationService.RefineText", + "service": { + "fullName": "google.cloud.translation.v3beta1.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "RefineText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3beta1.types.RefineTextRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.translate_v3beta1.types.RefineTextResponse", + "shortName": "refine_text" + }, + "description": "Sample for RefineText", + "file": "translate_v3beta1_generated_translation_service_refine_text_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "translate_v3beta1_generated_TranslationService_RefineText_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3beta1_generated_translation_service_refine_text_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.translate_v3beta1.TranslationServiceClient", + "shortName": "TranslationServiceClient" + }, + "fullName": "google.cloud.translate_v3beta1.TranslationServiceClient.refine_text", + "method": { + "fullName": "google.cloud.translation.v3beta1.TranslationService.RefineText", + "service": { + "fullName": "google.cloud.translation.v3beta1.TranslationService", + "shortName": "TranslationService" + }, + "shortName": "RefineText" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.translate_v3beta1.types.RefineTextRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.translate_v3beta1.types.RefineTextResponse", + "shortName": "refine_text" + }, + "description": "Sample for RefineText", + "file": "translate_v3beta1_generated_translation_service_refine_text_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "translate_v3beta1_generated_TranslationService_RefineText_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "translate_v3beta1_generated_translation_service_refine_text_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_async.py index 15247a55fe69..992ad6004328 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_async.py @@ -42,7 +42,7 @@ async def sample_adaptive_mt_translate(): request = translate_v3.AdaptiveMtTranslateRequest( parent="parent_value", dataset="dataset_value", - content=['content_value1', 'content_value2'], + content=["content_value1", "content_value2"], ) # Make the request @@ -51,4 +51,5 @@ async def sample_adaptive_mt_translate(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_AdaptiveMtTranslate_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_sync.py index 0c44d386ddb7..10233ed692ae 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_adaptive_mt_translate_sync.py @@ -42,7 +42,7 @@ def sample_adaptive_mt_translate(): request = translate_v3.AdaptiveMtTranslateRequest( parent="parent_value", dataset="dataset_value", - content=['content_value1', 'content_value2'], + content=["content_value1", "content_value2"], ) # Make the request @@ -51,4 +51,5 @@ def sample_adaptive_mt_translate(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_AdaptiveMtTranslate_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_async.py index a67c9d6e3977..dc0223622987 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_async.py @@ -48,7 +48,10 @@ async def sample_batch_translate_document(): request = translate_v3.BatchTranslateDocumentRequest( parent="parent_value", source_language_code="source_language_code_value", - 
target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ async def sample_batch_translate_document(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_BatchTranslateDocument_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_sync.py index d16b091322b1..8642127d7a41 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_document_sync.py @@ -48,7 +48,10 @@ def sample_batch_translate_document(): request = translate_v3.BatchTranslateDocumentRequest( parent="parent_value", source_language_code="source_language_code_value", - target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ def sample_batch_translate_document(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_BatchTranslateDocument_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_async.py index 1f960337a739..ed6c65848bf2 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_async.py @@ -48,7 +48,10 @@ async def sample_batch_translate_text(): request = translate_v3.BatchTranslateTextRequest( parent="parent_value", source_language_code="source_language_code_value", - target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ async def sample_batch_translate_text(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_BatchTranslateText_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_sync.py index b07129d9ad2b..1c45d64dbbd3 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_batch_translate_text_sync.py @@ -48,7 +48,10 @@ def sample_batch_translate_text(): request = translate_v3.BatchTranslateTextRequest( parent="parent_value", 
source_language_code="source_language_code_value", - target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ def sample_batch_translate_text(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_BatchTranslateText_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py index 8118ab482131..f5620c5df390 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_async.py @@ -53,4 +53,5 @@ async def sample_create_adaptive_mt_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py index 4e7e50f45246..3f88b03c53d9 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_adaptive_mt_dataset_sync.py @@ -53,4 +53,5 @@ def sample_create_adaptive_mt_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateAdaptiveMtDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py index 2bedbcce54b7..42ba6ca10c09 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_async.py @@ -53,4 +53,5 @@ async def sample_create_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py index 33f218979cd7..c618b4642219 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_dataset_sync.py @@ -53,4 +53,5 @@ def sample_create_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateDataset_sync] diff --git 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_async.py index 520af16d458c..17ed70124836 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_async.py @@ -57,4 +57,5 @@ async def sample_create_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py index a89b111552ec..a5b665ed094d 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_async.py @@ -49,4 +49,5 @@ async def sample_create_glossary_entry(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateGlossaryEntry_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py index f56f01291ecc..bde69530096e 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_entry_sync.py @@ -49,4 +49,5 @@ def sample_create_glossary_entry(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateGlossaryEntry_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_sync.py index 8f7acc8d9251..ff0eaf764ae2 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_glossary_sync.py @@ -57,4 +57,5 @@ def sample_create_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateGlossary_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py index 2e21362b8191..da2f52aafb03 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_async.py @@ -53,4 +53,5 
@@ async def sample_create_model(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateModel_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py index 705f470dbab8..60c29c13c558 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_create_model_sync.py @@ -53,4 +53,5 @@ def sample_create_model(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_CreateModel_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py index 552eaa448bbe..6a95aae2737d 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_async.py @@ -53,4 +53,5 @@ async def sample_delete_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DeleteDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py index bdd174b1f9a2..30f11ba4c66d 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_dataset_sync.py @@ -53,4 +53,5 @@ def sample_delete_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DeleteDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_async.py index 36dcfe48a258..1ebba9e951f8 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_async.py @@ -53,4 +53,5 @@ async def sample_delete_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DeleteGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_sync.py index 14344e2bb1ea..91a152712944 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_sync.py +++ 
b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_glossary_sync.py @@ -53,4 +53,5 @@ def sample_delete_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DeleteGlossary_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py index 106a1f766818..b69af9c68da8 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_async.py @@ -53,4 +53,5 @@ async def sample_delete_model(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DeleteModel_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py index 9a861e96581d..5a4c971abb2c 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_delete_model_sync.py @@ -53,4 +53,5 @@ def sample_delete_model(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DeleteModel_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_async.py index af4f19cb6206..3cdf6cff40e4 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_async.py @@ -50,4 +50,5 @@ async def sample_detect_language(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DetectLanguage_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_sync.py index 330e0bc70be3..912c8d01ef2e 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_detect_language_sync.py @@ -50,4 +50,5 @@ def sample_detect_language(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_DetectLanguage_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py index 5399443cf332..73080d291126 100644 --- 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_async.py @@ -57,4 +57,5 @@ async def sample_export_data(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_ExportData_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py index b5916b2923ff..9c44046b760c 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_export_data_sync.py @@ -57,4 +57,5 @@ def sample_export_data(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_ExportData_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py index 933e2c9580c7..6e71436bad96 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_async.py @@ -49,4 +49,5 @@ async def sample_get_adaptive_mt_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetAdaptiveMtDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py index e938f96d1d95..1e5f699ad947 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_dataset_sync.py @@ -49,4 +49,5 @@ def sample_get_adaptive_mt_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetAdaptiveMtDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_async.py index c16f39bab484..e813f180a7b8 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_async.py @@ -49,4 +49,5 @@ async def sample_get_adaptive_mt_file(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetAdaptiveMtFile_async] diff --git 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py index 74fac6dec7a8..b21c89e61704 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_adaptive_mt_file_sync.py @@ -49,4 +49,5 @@ def sample_get_adaptive_mt_file(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetAdaptiveMtFile_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py index 37ecabdd7f00..2dcfc5f18f12 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_async.py @@ -49,4 +49,5 @@ async def sample_get_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetDataset_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py index 8c586348ee4a..d24dc21cb611 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_dataset_sync.py @@ -49,4 +49,5 @@ def sample_get_dataset(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetDataset_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_async.py index 13c4431095c3..b7724ed25caa 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_async.py @@ -49,4 +49,5 @@ async def sample_get_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py index 478da4c63882..01f9b4b3d952 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_async.py @@ -49,4 +49,5 @@ async def sample_get_glossary_entry(): # Handle the response print(response) + # 
[END translate_v3_generated_TranslationService_GetGlossaryEntry_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py index c085fc9f86eb..ed35377cf7cb 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_entry_sync.py @@ -49,4 +49,5 @@ def sample_get_glossary_entry(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetGlossaryEntry_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_sync.py index a154c99613e9..ec386cb2fe32 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_glossary_sync.py @@ -49,4 +49,5 @@ def sample_get_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetGlossary_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py index 56279d5826f6..b78eb50a0647 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_async.py @@ -49,4 +49,5 @@ async def sample_get_model(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetModel_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py index 215e1d7bc091..f57e489a5c52 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_model_sync.py @@ -49,4 +49,5 @@ def sample_get_model(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetModel_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_async.py index 6f694ac310f0..07aa1a10738d 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_async.py @@ -49,4 +49,5 @@ async def 
sample_get_supported_languages(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetSupportedLanguages_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_sync.py index 2ad05ed0bb6e..b73717fdfa98 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_get_supported_languages_sync.py @@ -49,4 +49,5 @@ def sample_get_supported_languages(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_GetSupportedLanguages_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_async.py index 205323c10602..69dfc3287185 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_async.py @@ -41,7 +41,7 @@ async def sample_import_adaptive_mt_file(): # Initialize request argument(s) file_input_source = translate_v3.FileInputSource() file_input_source.mime_type = "mime_type_value" - file_input_source.content = b'content_blob' + file_input_source.content = b"content_blob" file_input_source.display_name = "display_name_value" request = translate_v3.ImportAdaptiveMtFileRequest( @@ -55,4 +55,5 @@ async def sample_import_adaptive_mt_file(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_ImportAdaptiveMtFile_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py index 49af2a4faaec..f287c82f4d81 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_adaptive_mt_file_sync.py @@ -41,7 +41,7 @@ def sample_import_adaptive_mt_file(): # Initialize request argument(s) file_input_source = translate_v3.FileInputSource() file_input_source.mime_type = "mime_type_value" - file_input_source.content = b'content_blob' + file_input_source.content = b"content_blob" file_input_source.display_name = "display_name_value" request = translate_v3.ImportAdaptiveMtFileRequest( @@ -55,4 +55,5 @@ def sample_import_adaptive_mt_file(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_ImportAdaptiveMtFile_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py index 3e640620d710..f817db3593a8 100644 
--- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_async.py @@ -53,4 +53,5 @@ async def sample_import_data(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_ImportData_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py index fa83eb09ba45..06fee7d22071 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_import_data_sync.py @@ -53,4 +53,5 @@ def sample_import_data(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_ImportData_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py index 0863d31a90e8..a6a2a2f0ad41 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_async.py @@ -50,4 +50,5 @@ async def sample_list_adaptive_mt_datasets(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py index a44f76fc98c9..d7200c65949d 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_datasets_sync.py @@ -50,4 +50,5 @@ def sample_list_adaptive_mt_datasets(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListAdaptiveMtDatasets_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_async.py index 46d94286f9e0..8b94b40449e8 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_async.py @@ -50,4 +50,5 @@ async def sample_list_adaptive_mt_files(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListAdaptiveMtFiles_async] diff --git 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py index 1f4ebc6dc132..ac283f5310e6 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_files_sync.py @@ -50,4 +50,5 @@ def sample_list_adaptive_mt_files(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListAdaptiveMtFiles_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py index e9ee2045eaf1..df2feda3d9c5 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_async.py @@ -50,4 +50,5 @@ async def sample_list_adaptive_mt_sentences(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListAdaptiveMtSentences_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py index caf1dbf356ce..f35f8a8a34be 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_adaptive_mt_sentences_sync.py @@ -50,4 +50,5 @@ def sample_list_adaptive_mt_sentences(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListAdaptiveMtSentences_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py index 40c6108647f8..1eca2023ce05 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_async.py @@ -50,4 +50,5 @@ async def sample_list_datasets(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListDatasets_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py index 92d51bc501d3..06c50f351869 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py +++ 
b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_datasets_sync.py @@ -50,4 +50,5 @@ def sample_list_datasets(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListDatasets_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py index 110cb2838ddf..23d7bf6d7c22 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_async.py @@ -50,4 +50,5 @@ async def sample_list_examples(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListExamples_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py index 843e45f8988a..ce48ae55e352 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_examples_sync.py @@ -50,4 +50,5 @@ def sample_list_examples(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListExamples_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_async.py index 7f30d6a0dde1..6ef5809d5022 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_async.py @@ -50,4 +50,5 @@ async def sample_list_glossaries(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListGlossaries_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_sync.py index e4fea55ce6b6..14aeed32d1ed 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossaries_sync.py @@ -50,4 +50,5 @@ def sample_list_glossaries(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListGlossaries_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py index e1ecaf86b1fb..709f27a5c9ae 100644 --- 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_async.py @@ -50,4 +50,5 @@ async def sample_list_glossary_entries(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListGlossaryEntries_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py index 87d640567842..465ef7091692 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_glossary_entries_sync.py @@ -50,4 +50,5 @@ def sample_list_glossary_entries(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListGlossaryEntries_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py index 6cdc3175ae0b..e0870f836dab 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_async.py @@ -50,4 +50,5 @@ async def sample_list_models(): async for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListModels_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py index 7cb9e9aec43e..37d3289e20fd 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_list_models_sync.py @@ -50,4 +50,5 @@ def sample_list_models(): for response in page_result: print(response) + # [END translate_v3_generated_TranslationService_ListModels_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py index 0d30aef352ce..503ca9ef66e2 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_async.py @@ -41,7 +41,7 @@ async def sample_romanize_text(): # Initialize request argument(s) request = translate_v3.RomanizeTextRequest( parent="parent_value", - contents=['contents_value1', 'contents_value2'], + contents=["contents_value1", "contents_value2"], ) # Make the request @@ -50,4 +50,5 @@ async def sample_romanize_text(): # 
Handle the response print(response) + # [END translate_v3_generated_TranslationService_RomanizeText_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py index 75ca2db025ff..cf76e5e200b7 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_romanize_text_sync.py @@ -41,7 +41,7 @@ def sample_romanize_text(): # Initialize request argument(s) request = translate_v3.RomanizeTextRequest( parent="parent_value", - contents=['contents_value1', 'contents_value2'], + contents=["contents_value1", "contents_value2"], ) # Make the request @@ -50,4 +50,5 @@ def sample_romanize_text(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_RomanizeText_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_async.py index d203ce3c9e5a..de8678029d66 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_async.py @@ -40,7 +40,7 @@ async def sample_translate_document(): # Initialize request argument(s) document_input_config = translate_v3.DocumentInputConfig() - document_input_config.content = b'content_blob' + document_input_config.content = b"content_blob" request = translate_v3.TranslateDocumentRequest( parent="parent_value", @@ -54,4 +54,5 @@ async def sample_translate_document(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_TranslateDocument_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_sync.py index 293096ed28d5..2db95a5e85fc 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_document_sync.py @@ -40,7 +40,7 @@ def sample_translate_document(): # Initialize request argument(s) document_input_config = translate_v3.DocumentInputConfig() - document_input_config.content = b'content_blob' + document_input_config.content = b"content_blob" request = translate_v3.TranslateDocumentRequest( parent="parent_value", @@ -54,4 +54,5 @@ def sample_translate_document(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_TranslateDocument_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_async.py index d5980599f1b0..657d47c43fbc 100644 --- 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_async.py @@ -40,7 +40,7 @@ async def sample_translate_text(): # Initialize request argument(s) request = translate_v3.TranslateTextRequest( - contents=['contents_value1', 'contents_value2'], + contents=["contents_value1", "contents_value2"], target_language_code="target_language_code_value", parent="parent_value", ) @@ -51,4 +51,5 @@ async def sample_translate_text(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_TranslateText_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_sync.py index 25b1f2329712..eaff50ed8661 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_translate_text_sync.py @@ -40,7 +40,7 @@ def sample_translate_text(): # Initialize request argument(s) request = translate_v3.TranslateTextRequest( - contents=['contents_value1', 'contents_value2'], + contents=["contents_value1", "contents_value2"], target_language_code="target_language_code_value", parent="parent_value", ) @@ -51,4 +51,5 @@ def sample_translate_text(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_TranslateText_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py index 9e387a553e64..1ff0e252f9b6 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_async.py @@ -56,4 +56,5 @@ async def sample_update_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_UpdateGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py index 38e89e96de4c..a285ffff32f5 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_async.py @@ -39,8 +39,7 @@ async def sample_update_glossary_entry(): client = translate_v3.TranslationServiceAsyncClient() # Initialize request argument(s) - request = translate_v3.UpdateGlossaryEntryRequest( - ) + request = translate_v3.UpdateGlossaryEntryRequest() # Make the request response = await client.update_glossary_entry(request=request) @@ -48,4 +47,5 @@ async def sample_update_glossary_entry(): # Handle the response print(response) + # [END 
translate_v3_generated_TranslationService_UpdateGlossaryEntry_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py index ace4ad9ff454..84f5f0108909 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_entry_sync.py @@ -39,8 +39,7 @@ def sample_update_glossary_entry(): client = translate_v3.TranslationServiceClient() # Initialize request argument(s) - request = translate_v3.UpdateGlossaryEntryRequest( - ) + request = translate_v3.UpdateGlossaryEntryRequest() # Make the request response = client.update_glossary_entry(request=request) @@ -48,4 +47,5 @@ def sample_update_glossary_entry(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_UpdateGlossaryEntry_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py index 9fc6c23279b3..75b1c6f8be81 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3_generated_translation_service_update_glossary_sync.py @@ -56,4 +56,5 @@ def sample_update_glossary(): # Handle the response print(response) + # [END translate_v3_generated_TranslationService_UpdateGlossary_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_async.py index c3b25459632e..ceec4f269de1 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_async.py @@ -48,7 +48,10 @@ async def sample_batch_translate_document(): request = translate_v3beta1.BatchTranslateDocumentRequest( parent="parent_value", source_language_code="source_language_code_value", - target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ async def sample_batch_translate_document(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_BatchTranslateDocument_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_sync.py index 825972b6706c..3e04278cb1bc 100644 --- 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_document_sync.py @@ -48,7 +48,10 @@ def sample_batch_translate_document(): request = translate_v3beta1.BatchTranslateDocumentRequest( parent="parent_value", source_language_code="source_language_code_value", - target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ def sample_batch_translate_document(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_BatchTranslateDocument_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_async.py index f468e8eabc16..f3a8da3573a5 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_async.py @@ -48,7 +48,10 @@ async def sample_batch_translate_text(): request = translate_v3beta1.BatchTranslateTextRequest( parent="parent_value", source_language_code="source_language_code_value", - target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ async def sample_batch_translate_text(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_BatchTranslateText_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_sync.py index 4150b77b7862..af663d66df06 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_batch_translate_text_sync.py @@ -48,7 +48,10 @@ def sample_batch_translate_text(): request = translate_v3beta1.BatchTranslateTextRequest( parent="parent_value", source_language_code="source_language_code_value", - target_language_codes=['target_language_codes_value1', 'target_language_codes_value2'], + target_language_codes=[ + "target_language_codes_value1", + "target_language_codes_value2", + ], input_configs=input_configs, output_config=output_config, ) @@ -63,4 +66,5 @@ def sample_batch_translate_text(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_BatchTranslateText_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_async.py 
b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_async.py index 5a88fd2a108d..22707779681b 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_async.py @@ -57,4 +57,5 @@ async def sample_create_glossary(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_CreateGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_sync.py index 232091c77675..5122c91928c0 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_create_glossary_sync.py @@ -57,4 +57,5 @@ def sample_create_glossary(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_CreateGlossary_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_async.py index 56e8dc2b4135..f915e4dc9dcd 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_async.py @@ -53,4 +53,5 @@ async def sample_delete_glossary(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_DeleteGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_sync.py index aaf55bd188f6..7093fac05791 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_delete_glossary_sync.py @@ -53,4 +53,5 @@ def sample_delete_glossary(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_DeleteGlossary_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_async.py index e1955dc49b36..6247e000cc34 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_async.py @@ -50,4 +50,5 @@ async def sample_detect_language(): # Handle the response 
print(response) + # [END translate_v3beta1_generated_TranslationService_DetectLanguage_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_sync.py index 2ea76c3639c2..9468c0704f46 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_detect_language_sync.py @@ -50,4 +50,5 @@ def sample_detect_language(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_DetectLanguage_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_async.py index d59978a6fd45..238502578aea 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_async.py @@ -49,4 +49,5 @@ async def sample_get_glossary(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_GetGlossary_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_sync.py index ecbbeeb6c264..f4ace3daf814 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_glossary_sync.py @@ -49,4 +49,5 @@ def sample_get_glossary(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_GetGlossary_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_async.py index cdf135714a67..e625f5729c83 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_async.py @@ -49,4 +49,5 @@ async def sample_get_supported_languages(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_GetSupportedLanguages_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_sync.py index 99cec6efe150..9341e3c3bc33 100644 --- 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_get_supported_languages_sync.py @@ -49,4 +49,5 @@ def sample_get_supported_languages(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_GetSupportedLanguages_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_async.py index db9edc0380db..90519ded0032 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_async.py @@ -50,4 +50,5 @@ async def sample_list_glossaries(): async for response in page_result: print(response) + # [END translate_v3beta1_generated_TranslationService_ListGlossaries_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_sync.py index a6fbeef83277..3fbe0362c3ae 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_list_glossaries_sync.py @@ -50,4 +50,5 @@ def sample_list_glossaries(): for response in page_result: print(response) + # [END translate_v3beta1_generated_TranslationService_ListGlossaries_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_refine_text_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_refine_text_async.py new file mode 100644 index 000000000000..29538eb2153e --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_refine_text_async.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RefineText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3beta1_generated_TranslationService_RefineText_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3beta1 + + +async def sample_refine_text(): + # Create a client + client = translate_v3beta1.TranslationServiceAsyncClient() + + # Initialize request argument(s) + refinement_entries = translate_v3beta1.RefinementEntry() + refinement_entries.source_text = "source_text_value" + refinement_entries.original_translation = "original_translation_value" + + request = translate_v3beta1.RefineTextRequest( + parent="parent_value", + refinement_entries=refinement_entries, + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + ) + + # Make the request + response = await client.refine_text(request=request) + + # Handle the response + print(response) + + +# [END translate_v3beta1_generated_TranslationService_RefineText_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_refine_text_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_refine_text_sync.py new file mode 100644 index 000000000000..55429dcc7d9c --- /dev/null +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_refine_text_sync.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RefineText +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-translate + + +# [START translate_v3beta1_generated_TranslationService_RefineText_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import translate_v3beta1 + + +def sample_refine_text(): + # Create a client + client = translate_v3beta1.TranslationServiceClient() + + # Initialize request argument(s) + refinement_entries = translate_v3beta1.RefinementEntry() + refinement_entries.source_text = "source_text_value" + refinement_entries.original_translation = "original_translation_value" + + request = translate_v3beta1.RefineTextRequest( + parent="parent_value", + refinement_entries=refinement_entries, + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + ) + + # Make the request + response = client.refine_text(request=request) + + # Handle the response + print(response) + + +# [END translate_v3beta1_generated_TranslationService_RefineText_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_async.py index ab0b75f3f98b..2795b2ec0b81 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_async.py @@ -40,7 +40,7 @@ async def sample_translate_document(): # Initialize request argument(s) document_input_config = translate_v3beta1.DocumentInputConfig() - document_input_config.content = b'content_blob' + document_input_config.content = b"content_blob" request = translate_v3beta1.TranslateDocumentRequest( parent="parent_value", @@ -54,4 +54,5 @@ async def sample_translate_document(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_TranslateDocument_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_sync.py index d5acae559b95..7ca55103eec9 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_document_sync.py @@ -40,7 +40,7 @@ def sample_translate_document(): # Initialize request argument(s) document_input_config = translate_v3beta1.DocumentInputConfig() - document_input_config.content = b'content_blob' + document_input_config.content = b"content_blob" request = translate_v3beta1.TranslateDocumentRequest( parent="parent_value", @@ -54,4 +54,5 @@ def sample_translate_document(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_TranslateDocument_sync] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_async.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_async.py index fc056f2d18cd..f432c8161d8c 100644 --- 
a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_async.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_async.py @@ -40,7 +40,7 @@ async def sample_translate_text(): # Initialize request argument(s) request = translate_v3beta1.TranslateTextRequest( - contents=['contents_value1', 'contents_value2'], + contents=["contents_value1", "contents_value2"], target_language_code="target_language_code_value", parent="parent_value", ) @@ -51,4 +51,5 @@ async def sample_translate_text(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_TranslateText_async] diff --git a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_sync.py b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_sync.py index 77c717a78276..7dc7e81dde5d 100644 --- a/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_sync.py +++ b/packages/google-cloud-translate/samples/generated_samples/translate_v3beta1_generated_translation_service_translate_text_sync.py @@ -40,7 +40,7 @@ def sample_translate_text(): # Initialize request argument(s) request = translate_v3beta1.TranslateTextRequest( - contents=['contents_value1', 'contents_value2'], + contents=["contents_value1", "contents_value2"], target_language_code="target_language_code_value", parent="parent_value", ) @@ -51,4 +51,5 @@ def sample_translate_text(): # Handle the response print(response) + # [END translate_v3beta1_generated_TranslationService_TranslateText_sync] diff --git a/packages/google-cloud-translate/scripts/fixup_translate_v3beta1_keywords.py b/packages/google-cloud-translate/scripts/fixup_translate_v3beta1_keywords.py index c03a90eefed6..3245ca46cb04 100644 --- a/packages/google-cloud-translate/scripts/fixup_translate_v3beta1_keywords.py +++ b/packages/google-cloud-translate/scripts/fixup_translate_v3beta1_keywords.py @@ -39,7 +39,7 @@ def partition( class translateCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'batch_translate_document': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'format_conversions', 'customized_attribution', 'enable_shadow_removal_native_pdf', 'enable_rotation_correction', ), + 'batch_translate_document': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'format_conversions', 'customized_attribution', 'enable_shadow_removal_native_pdf', 'enable_rotation_correction', 'pdf_native_only', ), 'batch_translate_text': ('parent', 'source_language_code', 'target_language_codes', 'input_configs', 'output_config', 'models', 'glossaries', 'labels', ), 'create_glossary': ('parent', 'glossary', ), 'delete_glossary': ('name', ), @@ -47,6 +47,7 @@ class translateCallTransformer(cst.CSTTransformer): 'get_glossary': ('name', ), 'get_supported_languages': ('parent', 'display_language_code', 'model', ), 'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', ), + 'refine_text': ('parent', 'refinement_entries', 'source_language_code', 'target_language_code', ), 'translate_document': ('parent', 'target_language_code', 
'document_input_config', 'source_language_code', 'document_output_config', 'model', 'glossary_config', 'labels', 'customized_attribution', 'is_translate_native_pdf_only', 'enable_shadow_removal_native_pdf', 'enable_rotation_correction', ), 'translate_text': ('contents', 'target_language_code', 'parent', 'mime_type', 'source_language_code', 'model', 'glossary_config', 'labels', ), } diff --git a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py index 8dac40a47dd5..5854dce9b383 100644 --- a/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py +++ b/packages/google-cloud-translate/tests/unit/gapic/translate_v3beta1/test_translation_service.py @@ -4617,6 +4617,251 @@ async def test_delete_glossary_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + translation_service.RefineTextRequest, + dict, + ], +) +def test_refine_text(request_type, transport: str = "grpc"): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = translation_service.RefineTextResponse( + refined_translations=["refined_translations_value"], + ) + response = client.refine_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = translation_service.RefineTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.RefineTextResponse) + assert response.refined_translations == ["refined_translations_value"] + + +def test_refine_text_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = translation_service.RefineTextRequest( + parent="parent_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.refine_text(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == translation_service.RefineTextRequest( + parent="parent_value", + source_language_code="source_language_code_value", + target_language_code="target_language_code_value", + ) + + +def test_refine_text_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.refine_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.refine_text] = mock_rpc + request = {} + client.refine_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.refine_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_refine_text_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = TranslationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.refine_text + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.refine_text + ] = mock_rpc + + request = {} + await client.refine_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.refine_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_refine_text_async( + transport: str = "grpc_asyncio", request_type=translation_service.RefineTextRequest +): + client = TranslationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + translation_service.RefineTextResponse( + refined_translations=["refined_translations_value"], + ) + ) + response = await client.refine_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = translation_service.RefineTextRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, translation_service.RefineTextResponse) + assert response.refined_translations == ["refined_translations_value"] + + +@pytest.mark.asyncio +async def test_refine_text_async_from_dict(): + await test_refine_text_async(request_type=dict) + + +def test_refine_text_field_headers(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = translation_service.RefineTextRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + call.return_value = translation_service.RefineTextResponse() + client.refine_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_refine_text_field_headers_async(): + client = TranslationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = translation_service.RefineTextRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + translation_service.RefineTextResponse() + ) + await client.refine_text(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + def test_translate_text_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6462,6 +6707,145 @@ def test_delete_glossary_rest_flattened_error(transport: str = "rest"): ) +def test_refine_text_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.refine_text in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.refine_text] = mock_rpc + + request = {} + client.refine_text(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.refine_text(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_refine_text_rest_required_fields( + request_type=translation_service.RefineTextRequest, +): + transport_class = transports.TranslationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["source_language_code"] = "" + request_init["target_language_code"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).refine_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + jsonified_request["sourceLanguageCode"] = "source_language_code_value" + jsonified_request["targetLanguageCode"] = "target_language_code_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).refine_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "sourceLanguageCode" in jsonified_request + assert jsonified_request["sourceLanguageCode"] == "source_language_code_value" + assert "targetLanguageCode" in jsonified_request + assert jsonified_request["targetLanguageCode"] == "target_language_code_value" + + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = translation_service.RefineTextResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.RefineTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.refine_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_refine_text_rest_unset_required_fields(): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.refine_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "refinementEntries", + "sourceLanguageCode", + "targetLanguageCode", + ) + ) + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.TranslationServiceGrpcTransport( @@ -6786,6 +7170,27 @@ def test_delete_glossary_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_refine_text_empty_call_grpc(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + call.return_value = translation_service.RefineTextResponse() + client.refine_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = translation_service.RefineTextRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = TranslationServiceAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -7065,6 +7470,33 @@ async def test_delete_glossary_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_refine_text_empty_call_grpc_asyncio(): + client = TranslationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + translation_service.RefineTextResponse( + refined_translations=["refined_translations_value"], + ) + ) + await client.refine_text(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = translation_service.RefineTextRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = TranslationServiceClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -8436,6 +8868,138 @@ def test_delete_glossary_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_refine_text_rest_bad_request( + request_type=translation_service.RefineTextRequest, +): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.refine_text(request) + + +@pytest.mark.parametrize( + "request_type", + [ + translation_service.RefineTextRequest, + dict, + ], +) +def test_refine_text_rest_call_success(request_type): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = translation_service.RefineTextResponse( + refined_translations=["refined_translations_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = translation_service.RefineTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.refine_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, translation_service.RefineTextResponse) + assert response.refined_translations == ["refined_translations_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_refine_text_rest_interceptors(null_interceptor): + transport = transports.TranslationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.TranslationServiceRestInterceptor(), + ) + client = TranslationServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_refine_text" + ) as post, mock.patch.object( + transports.TranslationServiceRestInterceptor, "post_refine_text_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.TranslationServiceRestInterceptor, "pre_refine_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = translation_service.RefineTextRequest.pb( + translation_service.RefineTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = translation_service.RefineTextResponse.to_json( + translation_service.RefineTextResponse() + ) + req.return_value.content = return_value + + request = translation_service.RefineTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = translation_service.RefineTextResponse() + post_with_metadata.return_value = ( + translation_service.RefineTextResponse(), + metadata, + ) + + client.refine_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9081,6 +9645,26 @@ def test_delete_glossary_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_refine_text_empty_call_rest(): + client = TranslationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.refine_text), "__call__") as call: + client.refine_text(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = translation_service.RefineTextRequest() + + assert args[0] == request_msg + + def test_translation_service_rest_lro_client(): client = TranslationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9141,6 +9725,7 @@ def test_translation_service_base_transport(): "list_glossaries", "get_glossary", "delete_glossary", + "refine_text", "get_location", "list_locations", "get_operation", @@ -9455,6 +10040,9 @@ def test_translation_service_client_transport_session_collision(transport_name): session1 = client1.transport.delete_glossary._session session2 = client2.transport.delete_glossary._session assert session1 != session2 + session1 = client1.transport.refine_text._session + session2 = client2.transport.refine_text._session + assert session1 != session2 def test_translation_service_grpc_transport_channel(): diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/datasourcetypes.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/datasourcetypes.py index bf1674d6289b..3ccc451b596e 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/datasourcetypes.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/datasourcetypes.py @@ -87,11 +87,6 @@ class PrimaryProductDataSource(proto.Message): Optional. Default rule management of the data source. If set, the linked data sources will be replaced. - contains_custom_rules (bool): - Output only. The existing data source setup contains at - least one custom (non-default) rule and therefore its - management through the ``default_rule_data_sources`` field - should be treated with caution. destinations (MutableSequence[google.shopping.merchant_datasources_v1.types.PrimaryProductDataSource.Destination]): Optional. A list of destinations describing where products of the data source can be shown. @@ -218,10 +213,6 @@ class State(proto.Enum): number=7, message=DefaultRule, ) - contains_custom_rules: bool = proto.Field( - proto.BOOL, - number=9, - ) destinations: MutableSequence[Destination] = proto.RepeatedField( proto.MESSAGE, number=10, diff --git a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/fileinputs.py b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/fileinputs.py index 15b3b6852f54..94d6cfcb4fb8 100644 --- a/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/fileinputs.py +++ b/packages/google-shopping-merchant-datasources/google/shopping/merchant_datasources_v1/types/fileinputs.py @@ -119,15 +119,12 @@ class FetchSettings(proto.Message): class Frequency(proto.Enum): r"""The required fields vary based on the frequency of fetching. For a - monthly fetch schedule, ``day of month`` and [hour of - day][https://developers.google.com/merchant/api/reference/rest/datasources_v1beta/accounts.dataSources#timeofday] - are required. For a weekly fetch schedule, [day of - week][https://developers.google.com/merchant/api/reference/rest/datasources_v1beta/accounts.dataSources#dayofweek] - and [hour of - day][https://developers.google.com/merchant/api/reference/rest/datasources_v1beta/accounts.dataSources#timeofday] - are required. 
For a daily fetch schedule, only an [hour of - day][https://developers.google.com/merchant/api/reference/rest/datasources_v1beta/accounts.dataSources#timeofday] - is required. + monthly fetch schedule, ``day of month`` and `hour of + day <../Shared.Types/TimeOfDay>`__ are required. For a weekly fetch + schedule, `day of week <../Shared.Types/DayOfWeek>`__ and `hour of + day <../Shared.Types/TimeOfDay>`__ are required. For a daily fetch + schedule, only an `hour of day <../Shared.Types/TimeOfDay>`__ is + required. Values: FREQUENCY_UNSPECIFIED (0): diff --git a/packages/google-shopping-merchant-datasources/noxfile.py b/packages/google-shopping-merchant-datasources/noxfile.py index c2ea8a8a5812..71aaf3092a97 100644 --- a/packages/google-shopping-merchant-datasources/noxfile.py +++ b/packages/google-shopping-merchant-datasources/noxfile.py @@ -27,6 +27,10 @@ LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +# Add samples to the list of directories to format if the directory exists. +if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_async.py index fc77e8509f60..d216f8c81563 100644 --- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_async.py +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_async.py @@ -54,4 +54,5 @@ async def sample_create_data_source(): # Handle the response print(response) + # [END merchantapi_v1_generated_DataSourcesService_CreateDataSource_async] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_sync.py index c153d1f3ef1a..b90d8991fa01 100644 --- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_sync.py +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_create_data_source_sync.py @@ -54,4 +54,5 @@ def sample_create_data_source(): # Handle the response print(response) + # [END merchantapi_v1_generated_DataSourcesService_CreateDataSource_sync] diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_async.py index e840eab3cc85..fbc2c8f7c875 100644 --- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_async.py +++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_async.py @@ -49,4 +49,5 @@ async def sample_get_data_source(): # Handle the response print(response) + # [END merchantapi_v1_generated_DataSourcesService_GetDataSource_async] diff --git 
a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_sync.py
index 9d6ff8185f36..35747f467cd9 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_get_data_source_sync.py
@@ -49,4 +49,5 @@ def sample_get_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1_generated_DataSourcesService_GetDataSource_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_async.py
index a8f532b962e0..fccaefa59409 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_async.py
@@ -50,4 +50,5 @@ async def sample_list_data_sources():
     async for response in page_result:
         print(response)
 
+
 # [END merchantapi_v1_generated_DataSourcesService_ListDataSources_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_sync.py
index 3b8948be2275..6e70fb4b82cb 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_list_data_sources_sync.py
@@ -50,4 +50,5 @@ def sample_list_data_sources():
     for response in page_result:
         print(response)
 
+
 # [END merchantapi_v1_generated_DataSourcesService_ListDataSources_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_async.py
index 57c72d7635da..6d62a5d14954 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_async.py
@@ -53,4 +53,5 @@ async def sample_update_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1_generated_DataSourcesService_UpdateDataSource_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_sync.py
index 390ede174c7c..db034753267c 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_data_sources_service_update_data_source_sync.py
@@ -53,4 +53,5 @@ def sample_update_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1_generated_DataSourcesService_UpdateDataSource_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_async.py
index 939f574cceea..88ab95a59039 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_async.py
@@ -49,4 +49,5 @@ async def sample_get_file_upload():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1_generated_FileUploadsService_GetFileUpload_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_sync.py
index 583eab992590..50d7fb8f451a 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1_generated_file_uploads_service_get_file_upload_sync.py
@@ -49,4 +49,5 @@ def sample_get_file_upload():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1_generated_FileUploadsService_GetFileUpload_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py
index b70e289ef57e..c0ebe5e62ec3 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_async.py
@@ -53,4 +53,5 @@ async def sample_create_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py
index 778d2f22809d..1777065aa7bf 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_create_data_source_sync.py
@@ -53,4 +53,5 @@ def sample_create_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_CreateDataSource_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py
index 579d2710c625..08ac71f97871 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_async.py
@@ -49,4 +49,5 @@ async def sample_get_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_GetDataSource_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py
index d53b33132a51..28a021093548 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_get_data_source_sync.py
@@ -49,4 +49,5 @@ def sample_get_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_GetDataSource_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py
index 74fcbd90b0d9..f857a07af721 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_async.py
@@ -50,4 +50,5 @@ async def sample_list_data_sources():
     async for response in page_result:
         print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_ListDataSources_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py
index 3b0c63cfc034..34c3b2187d4d 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_list_data_sources_sync.py
@@ -50,4 +50,5 @@ def sample_list_data_sources():
     for response in page_result:
         print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_ListDataSources_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py
index 9863339d3e70..d673c3c1d70b 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_async.py
@@ -52,4 +52,5 @@ async def sample_update_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py
index 2c6f4070bc2d..79d5b2752dc7 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_data_sources_service_update_data_source_sync.py
@@ -52,4 +52,5 @@ def sample_update_data_source():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_DataSourcesService_UpdateDataSource_sync]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py
index c4751a8993c0..5fbfb26df0bb 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_async.py
@@ -49,4 +49,5 @@ async def sample_get_file_upload():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_async]
diff --git a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py
index 8c77dd167031..dfe2e3eddfde 100644
--- a/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py
+++ b/packages/google-shopping-merchant-datasources/samples/generated_samples/merchantapi_v1beta_generated_file_uploads_service_get_file_upload_sync.py
@@ -49,4 +49,5 @@ def sample_get_file_upload():
     # Handle the response
     print(response)
 
+
 # [END merchantapi_v1beta_generated_FileUploadsService_GetFileUpload_sync]
diff --git a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1/test_data_sources_service.py b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1/test_data_sources_service.py
index 5b3392df511a..e762b9c8f33f 100644
--- a/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1/test_data_sources_service.py
+++ b/packages/google-shopping-merchant-datasources/tests/unit/gapic/merchant_datasources_v1/test_data_sources_service.py
@@ -5271,7 +5271,6 @@ def test_create_data_source_rest_call_success(request_type):
                 }
             ]
         },
-        "contains_custom_rules": True,
         "destinations": [{"destination": 1, "state": 1}],
     },
     "supplemental_product_data_source": {
@@ -5534,7 +5533,6 @@ def test_update_data_source_rest_call_success(request_type):
                 }
             ]
         },
-        "contains_custom_rules": True,
         "destinations": [{"destination": 1, "state": 1}],
     },
     "supplemental_product_data_source": {