diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py index e0c132c1f5f7..6d637114bc16 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/__init__.py @@ -37,25 +37,33 @@ ) from google.cloud.backupdr_v1.types.backupplan import ( BackupPlan, + BackupPlanRevision, BackupRule, BackupWindow, CreateBackupPlanRequest, DeleteBackupPlanRequest, GetBackupPlanRequest, + GetBackupPlanRevisionRequest, + ListBackupPlanRevisionsRequest, + ListBackupPlanRevisionsResponse, ListBackupPlansRequest, ListBackupPlansResponse, StandardSchedule, + UpdateBackupPlanRequest, WeekDayOfMonth, ) from google.cloud.backupdr_v1.types.backupplanassociation import ( BackupPlanAssociation, CreateBackupPlanAssociationRequest, DeleteBackupPlanAssociationRequest, + FetchBackupPlanAssociationsForResourceTypeRequest, + FetchBackupPlanAssociationsForResourceTypeResponse, GetBackupPlanAssociationRequest, ListBackupPlanAssociationsRequest, ListBackupPlanAssociationsResponse, RuleConfigInfo, TriggerBackupRequest, + UpdateBackupPlanAssociationRequest, ) from google.cloud.backupdr_v1.types.backupvault import ( Backup, @@ -97,6 +105,20 @@ from google.cloud.backupdr_v1.types.backupvault_ba import ( BackupApplianceBackupProperties, ) +from google.cloud.backupdr_v1.types.backupvault_cloudsql import ( + CloudSqlInstanceBackupPlanAssociationProperties, + CloudSqlInstanceBackupProperties, + CloudSqlInstanceDataSourceProperties, + CloudSqlInstanceDataSourceReferenceProperties, + CloudSqlInstanceInitializationConfig, +) +from google.cloud.backupdr_v1.types.backupvault_disk import ( + DiskBackupProperties, + DiskDataSourceProperties, + DiskRestoreProperties, + DiskTargetEnvironment, + RegionDiskTargetEnvironment, +) from google.cloud.backupdr_v1.types.backupvault_gce import ( AcceleratorConfig, AccessConfig, @@ -123,6 +145,14 @@ ServiceAccount, 
Tags, ) +from google.cloud.backupdr_v1.types.datasourcereference import ( + DataSourceBackupConfigInfo, + DataSourceGcpResourceInfo, + DataSourceReference, + FetchDataSourceReferencesForResourceTypeRequest, + FetchDataSourceReferencesForResourceTypeResponse, + GetDataSourceReferenceRequest, +) __all__ = ( "BackupDRClient", @@ -141,23 +171,31 @@ "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", "BackupPlan", + "BackupPlanRevision", "BackupRule", "BackupWindow", "CreateBackupPlanRequest", "DeleteBackupPlanRequest", "GetBackupPlanRequest", + "GetBackupPlanRevisionRequest", + "ListBackupPlanRevisionsRequest", + "ListBackupPlanRevisionsResponse", "ListBackupPlansRequest", "ListBackupPlansResponse", "StandardSchedule", + "UpdateBackupPlanRequest", "WeekDayOfMonth", "BackupPlanAssociation", "CreateBackupPlanAssociationRequest", "DeleteBackupPlanAssociationRequest", + "FetchBackupPlanAssociationsForResourceTypeRequest", + "FetchBackupPlanAssociationsForResourceTypeResponse", "GetBackupPlanAssociationRequest", "ListBackupPlanAssociationsRequest", "ListBackupPlanAssociationsResponse", "RuleConfigInfo", "TriggerBackupRequest", + "UpdateBackupPlanAssociationRequest", "Backup", "BackupApplianceBackupConfig", "BackupApplianceLockInfo", @@ -194,6 +232,16 @@ "BackupVaultView", "BackupView", "BackupApplianceBackupProperties", + "CloudSqlInstanceBackupPlanAssociationProperties", + "CloudSqlInstanceBackupProperties", + "CloudSqlInstanceDataSourceProperties", + "CloudSqlInstanceDataSourceReferenceProperties", + "CloudSqlInstanceInitializationConfig", + "DiskBackupProperties", + "DiskDataSourceProperties", + "DiskRestoreProperties", + "DiskTargetEnvironment", + "RegionDiskTargetEnvironment", "AcceleratorConfig", "AccessConfig", "AdvancedMachineFeatures", @@ -218,4 +266,10 @@ "ServiceAccount", "Tags", "KeyRevocationActionType", + "DataSourceBackupConfigInfo", + "DataSourceGcpResourceInfo", + "DataSourceReference", + 
"FetchDataSourceReferencesForResourceTypeRequest", + "FetchDataSourceReferencesForResourceTypeResponse", + "GetDataSourceReferenceRequest", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py index 435929ce46e7..20a9cd975b02 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.2.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py index 29b676c5bc1d..ecbf09aa3b54 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/__init__.py @@ -36,25 +36,33 @@ ) from .types.backupplan import ( BackupPlan, + BackupPlanRevision, BackupRule, BackupWindow, CreateBackupPlanRequest, DeleteBackupPlanRequest, GetBackupPlanRequest, + GetBackupPlanRevisionRequest, + ListBackupPlanRevisionsRequest, + ListBackupPlanRevisionsResponse, ListBackupPlansRequest, ListBackupPlansResponse, StandardSchedule, + UpdateBackupPlanRequest, WeekDayOfMonth, ) from .types.backupplanassociation import ( BackupPlanAssociation, CreateBackupPlanAssociationRequest, DeleteBackupPlanAssociationRequest, + FetchBackupPlanAssociationsForResourceTypeRequest, + FetchBackupPlanAssociationsForResourceTypeResponse, GetBackupPlanAssociationRequest, ListBackupPlanAssociationsRequest, ListBackupPlanAssociationsResponse, RuleConfigInfo, TriggerBackupRequest, + UpdateBackupPlanAssociationRequest, ) from .types.backupvault import ( Backup, @@ -94,6 +102,20 @@ UpdateDataSourceRequest, ) from .types.backupvault_ba 
import BackupApplianceBackupProperties +from .types.backupvault_cloudsql import ( + CloudSqlInstanceBackupPlanAssociationProperties, + CloudSqlInstanceBackupProperties, + CloudSqlInstanceDataSourceProperties, + CloudSqlInstanceDataSourceReferenceProperties, + CloudSqlInstanceInitializationConfig, +) +from .types.backupvault_disk import ( + DiskBackupProperties, + DiskDataSourceProperties, + DiskRestoreProperties, + DiskTargetEnvironment, + RegionDiskTargetEnvironment, +) from .types.backupvault_gce import ( AcceleratorConfig, AccessConfig, @@ -120,6 +142,14 @@ ServiceAccount, Tags, ) +from .types.datasourcereference import ( + DataSourceBackupConfigInfo, + DataSourceGcpResourceInfo, + DataSourceReference, + FetchDataSourceReferencesForResourceTypeRequest, + FetchDataSourceReferencesForResourceTypeResponse, + GetDataSourceReferenceRequest, +) __all__ = ( "BackupDRAsyncClient", @@ -139,11 +169,17 @@ "BackupLock", "BackupPlan", "BackupPlanAssociation", + "BackupPlanRevision", "BackupRule", "BackupVault", "BackupVaultView", "BackupView", "BackupWindow", + "CloudSqlInstanceBackupPlanAssociationProperties", + "CloudSqlInstanceBackupProperties", + "CloudSqlInstanceDataSourceProperties", + "CloudSqlInstanceDataSourceReferenceProperties", + "CloudSqlInstanceInitializationConfig", "ComputeInstanceBackupProperties", "ComputeInstanceDataSourceProperties", "ComputeInstanceRestoreProperties", @@ -156,22 +192,35 @@ "CustomerEncryptionKey", "DataSource", "DataSourceBackupApplianceApplication", + "DataSourceBackupConfigInfo", "DataSourceGcpResource", + "DataSourceGcpResourceInfo", + "DataSourceReference", "DeleteBackupPlanAssociationRequest", "DeleteBackupPlanRequest", "DeleteBackupRequest", "DeleteBackupVaultRequest", "DeleteManagementServerRequest", + "DiskBackupProperties", + "DiskDataSourceProperties", + "DiskRestoreProperties", + "DiskTargetEnvironment", "DisplayDevice", "Entry", + "FetchBackupPlanAssociationsForResourceTypeRequest", + 
"FetchBackupPlanAssociationsForResourceTypeResponse", + "FetchDataSourceReferencesForResourceTypeRequest", + "FetchDataSourceReferencesForResourceTypeResponse", "FetchUsableBackupVaultsRequest", "FetchUsableBackupVaultsResponse", "GcpBackupConfig", "GcpResource", "GetBackupPlanAssociationRequest", "GetBackupPlanRequest", + "GetBackupPlanRevisionRequest", "GetBackupRequest", "GetBackupVaultRequest", + "GetDataSourceReferenceRequest", "GetDataSourceRequest", "GetManagementServerRequest", "GuestOsFeature", @@ -181,6 +230,8 @@ "KeyRevocationActionType", "ListBackupPlanAssociationsRequest", "ListBackupPlanAssociationsResponse", + "ListBackupPlanRevisionsRequest", + "ListBackupPlanRevisionsResponse", "ListBackupPlansRequest", "ListBackupPlansResponse", "ListBackupVaultsRequest", @@ -198,6 +249,7 @@ "NetworkInterface", "NetworkPerformanceConfig", "OperationMetadata", + "RegionDiskTargetEnvironment", "RestoreBackupRequest", "RestoreBackupResponse", "RuleConfigInfo", @@ -209,6 +261,8 @@ "Tags", "TargetResource", "TriggerBackupRequest", + "UpdateBackupPlanAssociationRequest", + "UpdateBackupPlanRequest", "UpdateBackupRequest", "UpdateBackupVaultRequest", "UpdateDataSourceRequest", diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json index 5534a346d83c..3f336db90724 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_metadata.json @@ -55,6 +55,16 @@ "delete_management_server" ] }, + "FetchBackupPlanAssociationsForResourceType": { + "methods": [ + "fetch_backup_plan_associations_for_resource_type" + ] + }, + "FetchDataSourceReferencesForResourceType": { + "methods": [ + "fetch_data_source_references_for_resource_type" + ] + }, "FetchUsableBackupVaults": { "methods": [ "fetch_usable_backup_vaults" @@ -75,6 +85,11 @@ "get_backup_plan_association" ] }, + 
"GetBackupPlanRevision": { + "methods": [ + "get_backup_plan_revision" + ] + }, "GetBackupVault": { "methods": [ "get_backup_vault" @@ -85,6 +100,11 @@ "get_data_source" ] }, + "GetDataSourceReference": { + "methods": [ + "get_data_source_reference" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" @@ -100,6 +120,11 @@ "list_backup_plan_associations" ] }, + "ListBackupPlanRevisions": { + "methods": [ + "list_backup_plan_revisions" + ] + }, "ListBackupPlans": { "methods": [ "list_backup_plans" @@ -140,6 +165,16 @@ "update_backup" ] }, + "UpdateBackupPlan": { + "methods": [ + "update_backup_plan" + ] + }, + "UpdateBackupPlanAssociation": { + "methods": [ + "update_backup_plan_association" + ] + }, "UpdateBackupVault": { "methods": [ "update_backup_vault" @@ -200,6 +235,16 @@ "delete_management_server" ] }, + "FetchBackupPlanAssociationsForResourceType": { + "methods": [ + "fetch_backup_plan_associations_for_resource_type" + ] + }, + "FetchDataSourceReferencesForResourceType": { + "methods": [ + "fetch_data_source_references_for_resource_type" + ] + }, "FetchUsableBackupVaults": { "methods": [ "fetch_usable_backup_vaults" @@ -220,6 +265,11 @@ "get_backup_plan_association" ] }, + "GetBackupPlanRevision": { + "methods": [ + "get_backup_plan_revision" + ] + }, "GetBackupVault": { "methods": [ "get_backup_vault" @@ -230,6 +280,11 @@ "get_data_source" ] }, + "GetDataSourceReference": { + "methods": [ + "get_data_source_reference" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" @@ -245,6 +300,11 @@ "list_backup_plan_associations" ] }, + "ListBackupPlanRevisions": { + "methods": [ + "list_backup_plan_revisions" + ] + }, "ListBackupPlans": { "methods": [ "list_backup_plans" @@ -285,6 +345,16 @@ "update_backup" ] }, + "UpdateBackupPlan": { + "methods": [ + "update_backup_plan" + ] + }, + "UpdateBackupPlanAssociation": { + "methods": [ + "update_backup_plan_association" + ] + }, "UpdateBackupVault": { "methods": [ 
"update_backup_vault" @@ -345,6 +415,16 @@ "delete_management_server" ] }, + "FetchBackupPlanAssociationsForResourceType": { + "methods": [ + "fetch_backup_plan_associations_for_resource_type" + ] + }, + "FetchDataSourceReferencesForResourceType": { + "methods": [ + "fetch_data_source_references_for_resource_type" + ] + }, "FetchUsableBackupVaults": { "methods": [ "fetch_usable_backup_vaults" @@ -365,6 +445,11 @@ "get_backup_plan_association" ] }, + "GetBackupPlanRevision": { + "methods": [ + "get_backup_plan_revision" + ] + }, "GetBackupVault": { "methods": [ "get_backup_vault" @@ -375,6 +460,11 @@ "get_data_source" ] }, + "GetDataSourceReference": { + "methods": [ + "get_data_source_reference" + ] + }, "GetManagementServer": { "methods": [ "get_management_server" @@ -390,6 +480,11 @@ "list_backup_plan_associations" ] }, + "ListBackupPlanRevisions": { + "methods": [ + "list_backup_plan_revisions" + ] + }, "ListBackupPlans": { "methods": [ "list_backup_plans" @@ -430,6 +525,16 @@ "update_backup" ] }, + "UpdateBackupPlan": { + "methods": [ + "update_backup_plan" + ] + }, + "UpdateBackupPlanAssociation": { + "methods": [ + "update_backup_plan_association" + ] + }, "UpdateBackupVault": { "methods": [ "update_backup_vault" diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py index 435929ce46e7..20a9cd975b02 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.2.4" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py index 08ca2aaaeb32..97afd8d48bed 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/async_client.py @@ -63,7 +63,10 @@ backupplanassociation, backupvault, backupvault_ba, + backupvault_cloudsql, + backupvault_disk, backupvault_gce, + datasourcereference, ) from .client import BackupDRClient @@ -102,14 +105,26 @@ class BackupDRAsyncClient: parse_backup_plan_association_path = staticmethod( BackupDRClient.parse_backup_plan_association_path ) + backup_plan_revision_path = staticmethod(BackupDRClient.backup_plan_revision_path) + parse_backup_plan_revision_path = staticmethod( + BackupDRClient.parse_backup_plan_revision_path + ) backup_vault_path = staticmethod(BackupDRClient.backup_vault_path) parse_backup_vault_path = staticmethod(BackupDRClient.parse_backup_vault_path) data_source_path = staticmethod(BackupDRClient.data_source_path) parse_data_source_path = staticmethod(BackupDRClient.parse_data_source_path) + data_source_reference_path = staticmethod(BackupDRClient.data_source_reference_path) + parse_data_source_reference_path = staticmethod( + BackupDRClient.parse_data_source_reference_path + ) + instance_path = staticmethod(BackupDRClient.instance_path) + parse_instance_path = staticmethod(BackupDRClient.parse_instance_path) management_server_path = staticmethod(BackupDRClient.management_server_path) parse_management_server_path = staticmethod( BackupDRClient.parse_management_server_path ) + storage_pool_path = staticmethod(BackupDRClient.storage_pool_path) + parse_storage_pool_path = 
staticmethod(BackupDRClient.parse_storage_pool_path) common_billing_account_path = staticmethod( BackupDRClient.common_billing_account_path ) @@ -2808,6 +2823,160 @@ async def sample_create_backup_plan(): # Done; return the response. return response + async def update_backup_plan( + self, + request: Optional[Union[backupplan.UpdateBackupPlanRequest, dict]] = None, + *, + backup_plan: Optional[backupplan.BackupPlan] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.UpdateBackupPlanRequest( + backup_plan=backup_plan, + ) + + # Make the request + 
operation = client.update_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupPlanRequest, dict]]): + The request object. Request message for updating a backup + plan. + backup_plan (:class:`google.cloud.backupdr_v1.types.BackupPlan`): + Required. The resource being updated + This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Field mask is + used to specify the fields to be overwritten in the + BackupPlan resource by the update. The fields specified + in the update_mask are relative to the resource, not the + full request. A field will be overwritten if it is in + the mask. If the user does not provide a mask then the + request will fail. Currently, these fields are supported + in update: description, schedules, retention period, + adding and removing Backup Rules. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup_plan, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.UpdateBackupPlanRequest): + request = backupplan.UpdateBackupPlanRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_plan is not None: + request.backup_plan = backup_plan + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup_plan + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_plan.name", request.backup_plan.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + async def get_backup_plan( self, request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, @@ -3186,22 +3355,16 @@ async def sample_delete_backup_plan(): # Done; return the response. return response - async def create_backup_plan_association( + async def get_backup_plan_revision( self, - request: Optional[ - Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] - ] = None, + request: Optional[Union[backupplan.GetBackupPlanRevisionRequest, dict]] = None, *, - parent: Optional[str] = None, - backup_plan_association: Optional[ - backupplanassociation.BackupPlanAssociation - ] = None, - backup_plan_association_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation_async.AsyncOperation: - r"""Create a BackupPlanAssociation + ) -> backupplan.BackupPlanRevision: + r"""Gets details of a single BackupPlanRevision. .. 
code-block:: python @@ -3214,58 +3377,33 @@ async def create_backup_plan_association( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import backupdr_v1 - async def sample_create_backup_plan_association(): + async def sample_get_backup_plan_revision(): # Create a client client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - backup_plan_association = backupdr_v1.BackupPlanAssociation() - backup_plan_association.resource_type = "resource_type_value" - backup_plan_association.resource = "resource_value" - backup_plan_association.backup_plan = "backup_plan_value" - - request = backupdr_v1.CreateBackupPlanAssociationRequest( - parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", - backup_plan_association=backup_plan_association, + request = backupdr_v1.GetBackupPlanRevisionRequest( + name="name_value", ) # Make the request - operation = client.create_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = (await operation).result() + response = await client.get_backup_plan_revision(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]]): - The request object. Request message for creating a backup - plan. - parent (:class:`str`): - Required. The backup plan association project and - location in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR locations map to GCP regions, for example - **us-central1**. + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanRevisionRequest, dict]]): + The request object. The request message for getting a + ``BackupPlanRevision``. + name (:class:`str`): + Required. The resource name of the + ``BackupPlanRevision`` to retrieve. 
- This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): - Required. The resource being created - This corresponds to the ``backup_plan_association`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan_association_id (:class:`str`): - Required. The name of the backup plan - association to create. The name must be - unique for the specified project and - location. + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision}`` - This corresponds to the ``backup_plan_association_id`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3277,17 +3415,15 @@ async def sample_create_backup_plan_association(): be of type `bytes`. Returns: - google.api_core.operation_async.AsyncOperation: - An object representing a long-running operation. - - The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which - contains details like workload, backup plan etc + google.cloud.backupdr_v1.types.BackupPlanRevision: + BackupPlanRevision represents a snapshot of a BackupPlan at a point in + time. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [parent, backup_plan_association, backup_plan_association_id] + flattened_params = [name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -3299,30 +3435,24 @@ async def sample_create_backup_plan_association(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, backupplanassociation.CreateBackupPlanAssociationRequest - ): - request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + if not isinstance(request, backupplan.GetBackupPlanRevisionRequest): + request = backupplan.GetBackupPlanRevisionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if backup_plan_association is not None: - request.backup_plan_association = backup_plan_association - if backup_plan_association_id is not None: - request.backup_plan_association_id = backup_plan_association_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.create_backup_plan_association + self._client._transport.get_backup_plan_revision ] # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Validate the universe domain. @@ -3336,29 +3466,22 @@ async def sample_create_backup_plan_association(): metadata=metadata, ) - # Wrap the response in an operation future. 
- response = operation_async.from_gapic( - response, - self._client._transport.operations_client, - backupplanassociation.BackupPlanAssociation, - metadata_type=backupdr.OperationMetadata, - ) - # Done; return the response. return response - async def get_backup_plan_association( + async def list_backup_plan_revisions( self, request: Optional[ - Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + Union[backupplan.ListBackupPlanRevisionsRequest, dict] ] = None, *, - name: Optional[str] = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplanassociation.BackupPlanAssociation: - r"""Gets details of a single BackupPlanAssociation. + ) -> pagers.ListBackupPlanRevisionsAsyncPager: + r"""Lists BackupPlanRevisions in a given project and + location. .. code-block:: python @@ -3371,31 +3494,34 @@ async def get_backup_plan_association( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import backupdr_v1 - async def sample_get_backup_plan_association(): + async def sample_list_backup_plan_revisions(): # Create a client client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = backupdr_v1.GetBackupPlanAssociationRequest( - name="name_value", + request = backupdr_v1.ListBackupPlanRevisionsRequest( + parent="parent_value", ) # Make the request - response = await client.get_backup_plan_association(request=request) + page_result = client.list_backup_plan_revisions(request=request) # Handle the response - print(response) + async for response in page_result: + print(response) Args: - request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]]): - The request object. Request message for getting a - BackupPlanAssociation resource. - name (:class:`str`): - Required. 
Name of the backup plan association resource, - in the format - ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanRevisionsRequest, dict]]): + The request object. The request message for getting a list of + ``BackupPlanRevision``. + parent (:class:`str`): + Required. The project and location for which to retrieve + ``BackupPlanRevisions`` information. Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}``. + In Cloud BackupDR, locations map to GCP regions, for + e.g. **us-central1**. - This corresponds to the ``name`` field + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, @@ -3407,17 +3533,18 @@ async def sample_get_backup_plan_association(): be of type `bytes`. Returns: - google.cloud.backupdr_v1.types.BackupPlanAssociation: - A BackupPlanAssociation represents a - single BackupPlanAssociation which - contains details like workload, backup - plan etc + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanRevisionsAsyncPager: + The response message for getting a list of + BackupPlanRevision. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [name] + flattened_params = [parent] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -3429,8 +3556,454 @@ async def sample_get_backup_plan_association(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. 
- if not isinstance( - request, backupplanassociation.GetBackupPlanAssociationRequest + if not isinstance(request, backupplan.ListBackupPlanRevisionsRequest): + request = backupplan.ListBackupPlanRevisionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plan_revisions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBackupPlanRevisionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a BackupPlanAssociation + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]]): + The request object. Request message for creating a backup + plan. + parent (:class:`str`): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): + Required. 
The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (:class:`str`): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. + + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_plan_association, backup_plan_association_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.UpdateBackupPlanAssociationRequest, dict] + ] = None, + *, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a BackupPlanAssociation. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_update_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.UpdateBackupPlanAssociationRequest( + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.update_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.UpdateBackupPlanAssociationRequest, dict]]): + The request object. Request message for updating a backup + plan association. + backup_plan_association (:class:`google.cloud.backupdr_v1.types.BackupPlanAssociation`): + Required. The resource being updated + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to update. Field mask is + used to specify the fields to be overwritten in the + BackupPlanAssociation resource by the update. The fields + specified in the update_mask are relative to the + resource, not the full request. 
A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. Currently + backup_plan_association.backup_plan is the only + supported field. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup_plan_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, backupplanassociation.UpdateBackupPlanAssociationRequest + ): + request = backupplanassociation.UpdateBackupPlanAssociationRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "backup_plan_association.name", + request.backup_plan_association.name, + ), + ) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.GetBackupPlanAssociationRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backupplanassociation.BackupPlanAssociation: + r"""Gets details of a single BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanAssociationRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_association(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest, dict]]): + The request object. Request message for getting a + BackupPlanAssociation resource. + name (:class:`str`): + Required. Name of the backup plan association resource, + in the format + ``projects/{project}/locations/{location}/backupPlanAssociations/{backupPlanAssociationId}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.types.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.GetBackupPlanAssociationRequest ): request = backupplanassociation.GetBackupPlanAssociationRequest(request) @@ -3465,19 +4038,158 @@ async def sample_get_backup_plan_association(): # Done; return the response. return response - async def list_backup_plan_associations( + async def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupPlanAssociationsAsyncPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]]): + The request object. Request message for List + BackupPlanAssociation + parent (:class:`str`): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListBackupPlanAssociationsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_backup_plan_associations_for_resource_type( self, request: Optional[ - Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + Union[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + dict, + ] ] = None, *, parent: Optional[str] = None, + resource_type: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupPlanAssociationsAsyncPager: - r"""Lists BackupPlanAssociations in a given project and - location. + ) -> pagers.FetchBackupPlanAssociationsForResourceTypeAsyncPager: + r"""List BackupPlanAssociations for a given resource + type. .. code-block:: python @@ -3490,37 +4202,44 @@ async def list_backup_plan_associations( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import backupdr_v1 - async def sample_list_backup_plan_associations(): + async def sample_fetch_backup_plan_associations_for_resource_type(): # Create a client client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) - request = backupdr_v1.ListBackupPlanAssociationsRequest( + request = backupdr_v1.FetchBackupPlanAssociationsForResourceTypeRequest( parent="parent_value", + resource_type="resource_type_value", ) # Make the request - page_result = client.list_backup_plan_associations(request=request) + page_result = client.fetch_backup_plan_associations_for_resource_type(request=request) # Handle the response async for response in page_result: print(response) Args: - request (Optional[Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]]): - The request object. 
Request message for List - BackupPlanAssociation + request (Optional[Union[google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeRequest, dict]]): + The request object. Request for the + FetchBackupPlanAssociationsForResourceType + method. parent (:class:`str`): - Required. The project and location for which to retrieve - backup Plan Associations information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve backup plan associations - for all locations, use "-" for the ``{location}`` value. + Required. The parent resource name. + Format: + projects/{project}/locations/{location} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + resource_type (:class:`str`): + Required. The type of the GCP + resource. Ex: + sql.googleapis.com/Instance + + This corresponds to the ``resource_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3530,18 +4249,18 @@ async def sample_list_backup_plan_associations(): be of type `bytes`. Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager: - Response message for List - BackupPlanAssociation - Iterating over this object will yield - results and resolve additional pages - automatically. + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupPlanAssociationsForResourceTypeAsyncPager: + Response for the + FetchBackupPlanAssociationsForResourceType + method. Iterating over this object will + yield results and resolve additional + pages automatically. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [parent] + flattened_params = [parent, resource_type] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -3554,19 +4273,26 @@ async def sample_list_backup_plan_associations(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, backupplanassociation.ListBackupPlanAssociationsRequest + request, + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, ): - request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( + request + ) + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if resource_type is not None: + request.resource_type = resource_type # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._client._transport._wrapped_methods[ - self._client._transport.list_backup_plan_associations + self._client._transport.fetch_backup_plan_associations_for_resource_type ] # Certain fields should be provided within the metadata header; @@ -3588,7 +4314,7 @@ async def sample_list_backup_plan_associations(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers.ListBackupPlanAssociationsAsyncPager( + response = pagers.FetchBackupPlanAssociationsForResourceTypeAsyncPager( method=rpc, request=request, response=response, @@ -3877,6 +4603,272 @@ async def sample_trigger_backup(): # Done; return the response. 
return response + async def get_data_source_reference( + self, + request: Optional[ + Union[datasourcereference.GetDataSourceReferenceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datasourcereference.DataSourceReference: + r"""Gets details of a single DataSourceReference. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_get_data_source_reference(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceReferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source_reference(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.GetDataSourceReferenceRequest, dict]]): + The request object. Request for the + GetDataSourceReference method. + name (:class:`str`): + Required. The name of the DataSourceReference to + retrieve. Format: + projects/{project}/locations/{location}/dataSourceReferences/{data_source_reference} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.types.DataSourceReference: + DataSourceReference is a reference to + a DataSource resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasourcereference.GetDataSourceReferenceRequest): + request = datasourcereference.GetDataSourceReferenceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_source_reference + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def fetch_data_source_references_for_resource_type( + self, + request: Optional[ + Union[ + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + resource_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.FetchDataSourceReferencesForResourceTypeAsyncPager: + r"""Fetch DataSourceReferences for a given project, + location and resource type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + async def sample_fetch_data_source_references_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchDataSourceReferencesForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_data_source_references_for_resource_type(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeRequest, dict]]): + The request object. Request for the + FetchDataSourceReferencesForResourceType + method. + parent (:class:`str`): + Required. The parent resource name. 
+ Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource_type (:class:`str`): + Required. The type of the GCP + resource. Ex: + sql.googleapis.com/Instance + + This corresponds to the ``resource_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchDataSourceReferencesForResourceTypeAsyncPager: + Response for the + FetchDataSourceReferencesForResourceType + method. Iterating over this object will + yield results and resolve additional + pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, resource_type] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, datasourcereference.FetchDataSourceReferencesForResourceTypeRequest + ): + request = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest( + request + ) + ) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if resource_type is not None: + request.resource_type = resource_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.fetch_data_source_references_for_resource_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchDataSourceReferencesForResourceTypeAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def initialize_service( self, request: Optional[Union[backupdr.InitializeServiceRequest, dict]] = None, @@ -3903,7 +4895,11 @@ async def sample_initialize_service(): client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) + cloud_sql_instance_initialization_config = backupdr_v1.CloudSqlInstanceInitializationConfig() + cloud_sql_instance_initialization_config.edition = "ENTERPRISE_PLUS" + request = backupdr_v1.InitializeServiceRequest( + cloud_sql_instance_initialization_config=cloud_sql_instance_initialization_config, name="name_value", resource_type="resource_type_value", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py index 55233b461087..427d7dbce7c8 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/client.py @@ -80,7 +80,10 @@ backupplanassociation, backupvault, backupvault_ba, + backupvault_cloudsql, + backupvault_disk, backupvault_gce, + datasourcereference, ) from .transports.base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -285,6 +288,30 @@ def parse_backup_plan_association_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def backup_plan_revision_path( + project: str, + location: str, + backup_plan: str, + revision: str, + ) -> str: + """Returns a fully-qualified backup_plan_revision string.""" + return "projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision}".format( + project=project, + location=location, + backup_plan=backup_plan, + revision=revision, + ) + + @staticmethod + def parse_backup_plan_revision_path(path: str) -> Dict[str, str]: + """Parses a backup_plan_revision path into its component segments.""" + m = re.match( + 
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/backupPlans/(?P<backup_plan>.+?)/revisions/(?P<revision>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def backup_vault_path( project: str, @@ -333,6 +360,45 @@ def parse_data_source_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def data_source_reference_path( + project: str, + location: str, + data_source_reference: str, + ) -> str: + """Returns a fully-qualified data_source_reference string.""" + return "projects/{project}/locations/{location}/dataSourceReferences/{data_source_reference}".format( + project=project, + location=location, + data_source_reference=data_source_reference, + ) + + @staticmethod + def parse_data_source_reference_path(path: str) -> Dict[str, str]: + """Parses a data_source_reference path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataSourceReferences/(?P<data_source_reference>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def instance_path( + project: str, + instance: str, + ) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/instances/{instance}".format( + project=project, + instance=instance, + ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str, str]: + """Parses a instance path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/instances/(?P<instance>.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def management_server_path( project: str, @@ -355,6 +421,28 @@ def parse_management_server_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def storage_pool_path( + project: str, + zone: str, + storage_pool: str, + ) -> str: + """Returns a fully-qualified storage_pool string.""" + return "projects/{project}/zones/{zone}/storagePools/{storage_pool}".format( + project=project, + zone=zone, + storage_pool=storage_pool, + ) + + @staticmethod + def parse_storage_pool_path(path: str) -> Dict[str,
str]: + """Parses a storage_pool path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/zones/(?P<zone>.+?)/storagePools/(?P<storage_pool>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -3283,6 +3371,157 @@ def sample_create_backup_plan(): # Done; return the response. return response + def update_backup_plan( + self, + request: Optional[Union[backupplan.UpdateBackupPlanRequest, dict]] = None, + *, + backup_plan: Optional[backupplan.BackupPlan] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a BackupPlan. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.UpdateBackupPlanRequest( + backup_plan=backup_plan, + ) + + # Make the request + operation = client.update_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupPlanRequest, dict]): + The request object. Request message for updating a backup + plan. + backup_plan (google.cloud.backupdr_v1.types.BackupPlan): + Required. The resource being updated + This corresponds to the ``backup_plan`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Field mask is + used to specify the fields to be overwritten in the + BackupPlan resource by the update. The fields specified + in the update_mask are relative to the resource, not the + full request. A field will be overwritten if it is in + the mask. 
If the user does not provide a mask then the + request will fail. Currently, these fields are supported + in update: description, schedules, retention period, + adding and removing Backup Rules. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlan` A BackupPlan specifies some common fields, such as description as well + as one or more BackupRule messages. Each BackupRule + has a retention policy and defines a schedule by + which the system is to perform backup workloads. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup_plan, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, backupplan.UpdateBackupPlanRequest): + request = backupplan.UpdateBackupPlanRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if backup_plan is not None: + request.backup_plan = backup_plan + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_backup_plan] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("backup_plan.name", request.backup_plan.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplan.BackupPlan, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + def get_backup_plan( self, request: Optional[Union[backupplan.GetBackupPlanRequest, dict]] = None, @@ -3652,22 +3891,16 @@ def sample_delete_backup_plan(): # Done; return the response. 
return response - def create_backup_plan_association( + def get_backup_plan_revision( self, - request: Optional[ - Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] - ] = None, + request: Optional[Union[backupplan.GetBackupPlanRevisionRequest, dict]] = None, *, - parent: Optional[str] = None, - backup_plan_association: Optional[ - backupplanassociation.BackupPlanAssociation - ] = None, - backup_plan_association_id: Optional[str] = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operation.Operation: - r"""Create a BackupPlanAssociation + ) -> backupplan.BackupPlanRevision: + r"""Gets details of a single BackupPlanRevision. .. code-block:: python @@ -3680,58 +3913,33 @@ def create_backup_plan_association( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import backupdr_v1 - def sample_create_backup_plan_association(): + def sample_get_backup_plan_revision(): # Create a client client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - backup_plan_association = backupdr_v1.BackupPlanAssociation() - backup_plan_association.resource_type = "resource_type_value" - backup_plan_association.resource = "resource_value" - backup_plan_association.backup_plan = "backup_plan_value" - - request = backupdr_v1.CreateBackupPlanAssociationRequest( - parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", - backup_plan_association=backup_plan_association, + request = backupdr_v1.GetBackupPlanRevisionRequest( + name="name_value", ) # Make the request - operation = client.create_backup_plan_association(request=request) - - print("Waiting for operation to complete...") - - response = operation.result() + response = client.get_backup_plan_revision(request=request) # Handle the response print(response) Args: - request 
(Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]): - The request object. Request message for creating a backup - plan. - parent (str): - Required. The backup plan association project and - location in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR locations map to GCP regions, for example - **us-central1**. + request (Union[google.cloud.backupdr_v1.types.GetBackupPlanRevisionRequest, dict]): + The request object. The request message for getting a + ``BackupPlanRevision``. + name (str): + Required. The resource name of the + ``BackupPlanRevision`` to retrieve. - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): - Required. The resource being created - This corresponds to the ``backup_plan_association`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - backup_plan_association_id (str): - Required. The name of the backup plan - association to create. The name must be - unique for the specified project and - location. + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision}`` - This corresponds to the ``backup_plan_association_id`` field + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -3743,17 +3951,15 @@ def sample_create_backup_plan_association(): be of type `bytes`. Returns: - google.api_core.operation.Operation: - An object representing a long-running operation. 
- - The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which - contains details like workload, backup plan etc + google.cloud.backupdr_v1.types.BackupPlanRevision: + BackupPlanRevision represents a snapshot of a BackupPlan at a point in + time. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - flattened_params = [parent, backup_plan_association, backup_plan_association_id] + flattened_params = [name] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -3765,29 +3971,461 @@ def sample_create_backup_plan_association(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. - if not isinstance( - request, backupplanassociation.CreateBackupPlanAssociationRequest - ): - request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + if not isinstance(request, backupplan.GetBackupPlanRevisionRequest): + request = backupplan.GetBackupPlanRevisionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: - request.parent = parent - if backup_plan_association is not None: - request.backup_plan_association = backup_plan_association - if backup_plan_association_id is not None: - request.backup_plan_association_id = backup_plan_association_id + if name is not None: + request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.create_backup_plan_association - ] + rpc = self._transport._wrapped_methods[self._transport.get_backup_plan_revision] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_plan_revisions( + self, + request: Optional[ + Union[backupplan.ListBackupPlanRevisionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupPlanRevisionsPager: + r"""Lists BackupPlanRevisions in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plan_revisions(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlanRevisionsRequest, dict]): + The request object. The request message for getting a list of + ``BackupPlanRevision``. + parent (str): + Required. 
The project and location for which to retrieve + ``BackupPlanRevisions`` information. Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}``. + In Cloud BackupDR, locations map to GCP regions, for + e.g. **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanRevisionsPager: + The response message for getting a list of + BackupPlanRevision. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, backupplan.ListBackupPlanRevisionsRequest): + request = backupplan.ListBackupPlanRevisionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_backup_plan_revisions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupPlanRevisionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.CreateBackupPlanAssociationRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + backup_plan_association_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Create a BackupPlanAssociation + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_create_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.create_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.CreateBackupPlanAssociationRequest, dict]): + The request object. Request message for creating a backup + plan. + parent (str): + Required. The backup plan association project and + location in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR locations map to GCP regions, for example + **us-central1**. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being created + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backup_plan_association_id (str): + Required. The name of the backup plan + association to create. The name must be + unique for the specified project and + location. 
+ + This corresponds to the ``backup_plan_association_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, backup_plan_association, backup_plan_association_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.CreateBackupPlanAssociationRequest + ): + request = backupplanassociation.CreateBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if backup_plan_association_id is not None: + request.backup_plan_association_id = backup_plan_association_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + backupplanassociation.BackupPlanAssociation, + metadata_type=backupdr.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_backup_plan_association( + self, + request: Optional[ + Union[backupplanassociation.UpdateBackupPlanAssociationRequest, dict] + ] = None, + *, + backup_plan_association: Optional[ + backupplanassociation.BackupPlanAssociation + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Update a BackupPlanAssociation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_update_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.UpdateBackupPlanAssociationRequest( + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.update_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.UpdateBackupPlanAssociationRequest, dict]): + The request object. Request message for updating a backup + plan association. + backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation): + Required. The resource being updated + This corresponds to the ``backup_plan_association`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. Field mask is + used to specify the fields to be overwritten in the + BackupPlanAssociation resource by the update. The fields + specified in the update_mask are relative to the + resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then the request will fail. Currently + backup_plan_association.backup_plan is the only + supported field. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.backupdr_v1.types.BackupPlanAssociation` A BackupPlanAssociation represents a single BackupPlanAssociation which + contains details like workload, backup plan etc + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [backup_plan_association, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.UpdateBackupPlanAssociationRequest + ): + request = backupplanassociation.UpdateBackupPlanAssociationRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if backup_plan_association is not None: + request.backup_plan_association = backup_plan_association + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.update_backup_plan_association + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ( + "backup_plan_association.name", + request.backup_plan_association.name, + ), + ) + ), ) # Validate the universe domain. @@ -3918,9 +4556,143 @@ def sample_get_backup_plan_association(): # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_backup_plan_associations( + self, + request: Optional[ + Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBackupPlanAssociationsPager: + r"""Lists BackupPlanAssociations in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_list_backup_plan_associations(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanAssociationsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_associations(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]): + The request object. Request message for List + BackupPlanAssociation + parent (str): + Required. The project and location for which to retrieve + backup Plan Associations information, in the format + ``projects/{project_id}/locations/{location}``. In Cloud + BackupDR, locations map to GCP regions, for example + **us-central1**. To retrieve backup plan associations + for all locations, use "-" for the ``{location}`` value. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager: + Response message for List + BackupPlanAssociation + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, backupplanassociation.ListBackupPlanAssociationsRequest + ): + request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_backup_plan_associations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBackupPlanAssociationsPager( + method=rpc, + request=request, + response=response, retry=retry, timeout=timeout, metadata=metadata, @@ -3929,19 +4701,23 @@ def sample_get_backup_plan_association(): # Done; return the response. 
return response - def list_backup_plan_associations( + def fetch_backup_plan_associations_for_resource_type( self, request: Optional[ - Union[backupplanassociation.ListBackupPlanAssociationsRequest, dict] + Union[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + dict, + ] ] = None, *, parent: Optional[str] = None, + resource_type: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers.ListBackupPlanAssociationsPager: - r"""Lists BackupPlanAssociations in a given project and - location. + ) -> pagers.FetchBackupPlanAssociationsForResourceTypePager: + r"""List BackupPlanAssociations for a given resource + type. .. code-block:: python @@ -3954,37 +4730,44 @@ def list_backup_plan_associations( # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import backupdr_v1 - def sample_list_backup_plan_associations(): + def sample_fetch_backup_plan_associations_for_resource_type(): # Create a client client = backupdr_v1.BackupDRClient() # Initialize request argument(s) - request = backupdr_v1.ListBackupPlanAssociationsRequest( + request = backupdr_v1.FetchBackupPlanAssociationsForResourceTypeRequest( parent="parent_value", + resource_type="resource_type_value", ) # Make the request - page_result = client.list_backup_plan_associations(request=request) + page_result = client.fetch_backup_plan_associations_for_resource_type(request=request) # Handle the response for response in page_result: print(response) Args: - request (Union[google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest, dict]): - The request object. Request message for List - BackupPlanAssociation + request (Union[google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeRequest, dict]): + The request object. Request for the + FetchBackupPlanAssociationsForResourceType + method. 
parent (str): - Required. The project and location for which to retrieve - backup Plan Associations information, in the format - ``projects/{project_id}/locations/{location}``. In Cloud - BackupDR, locations map to GCP regions, for example - **us-central1**. To retrieve backup plan associations - for all locations, use "-" for the ``{location}`` value. + Required. The parent resource name. + Format: + projects/{project}/locations/{location} This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + resource_type (str): + Required. The type of the GCP + resource. Ex: + sql.googleapis.com/Instance + + This corresponds to the ``resource_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3994,18 +4777,18 @@ def sample_list_backup_plan_associations(): be of type `bytes`. Returns: - google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager: - Response message for List - BackupPlanAssociation - Iterating over this object will yield - results and resolve additional pages - automatically. + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupPlanAssociationsForResourceTypePager: + Response for the + FetchBackupPlanAssociationsForResourceType + method. Iterating over this object will + yield results and resolve additional + pages automatically. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- flattened_params = [parent] + flattened_params = [parent, resource_type] has_flattened_params = ( len([param for param in flattened_params if param is not None]) > 0 ) @@ -4018,18 +4801,25 @@ def sample_list_backup_plan_associations(): # - Use the request object if provided (there's no risk of modifying the input as # there are no flattened fields), or create one. if not isinstance( - request, backupplanassociation.ListBackupPlanAssociationsRequest + request, + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, ): - request = backupplanassociation.ListBackupPlanAssociationsRequest(request) + request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( + request + ) + ) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if resource_type is not None: + request.resource_type = resource_type # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[ - self._transport.list_backup_plan_associations + self._transport.fetch_backup_plan_associations_for_resource_type ] # Certain fields should be provided within the metadata header; @@ -4051,7 +4841,7 @@ def sample_list_backup_plan_associations(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers.ListBackupPlanAssociationsPager( + response = pagers.FetchBackupPlanAssociationsForResourceTypePager( method=rpc, request=request, response=response, @@ -4336,6 +5126,270 @@ def sample_trigger_backup(): # Done; return the response. 
return response + def get_data_source_reference( + self, + request: Optional[ + Union[datasourcereference.GetDataSourceReferenceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datasourcereference.DataSourceReference: + r"""Gets details of a single DataSourceReference. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_get_data_source_reference(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceReferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source_reference(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.GetDataSourceReferenceRequest, dict]): + The request object. Request for the + GetDataSourceReference method. + name (str): + Required. The name of the DataSourceReference to + retrieve. Format: + projects/{project}/locations/{location}/dataSourceReferences/{data_source_reference} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.types.DataSourceReference: + DataSourceReference is a reference to + a DataSource resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datasourcereference.GetDataSourceReferenceRequest): + request = datasourcereference.GetDataSourceReferenceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_data_source_reference + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def fetch_data_source_references_for_resource_type( + self, + request: Optional[ + Union[ + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, + dict, + ] + ] = None, + *, + parent: Optional[str] = None, + resource_type: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.FetchDataSourceReferencesForResourceTypePager: + r"""Fetch DataSourceReferences for a given project, + location and resource type. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import backupdr_v1 + + def sample_fetch_data_source_references_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchDataSourceReferencesForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_data_source_references_for_resource_type(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeRequest, dict]): + The request object. Request for the + FetchDataSourceReferencesForResourceType + method. + parent (str): + Required. The parent resource name. + Format: + projects/{project}/locations/{location} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource_type (str): + Required. The type of the GCP + resource. Ex: + sql.googleapis.com/Instance + + This corresponds to the ``resource_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.backupdr_v1.services.backup_dr.pagers.FetchDataSourceReferencesForResourceTypePager: + Response for the + FetchDataSourceReferencesForResourceType + method. Iterating over this object will + yield results and resolve additional + pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, resource_type] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, datasourcereference.FetchDataSourceReferencesForResourceTypeRequest + ): + request = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest( + request + ) + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if resource_type is not None: + request.resource_type = resource_type + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.fetch_data_source_references_for_resource_type + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchDataSourceReferencesForResourceTypePager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def initialize_service( self, request: Optional[Union[backupdr.InitializeServiceRequest, dict]] = None, @@ -4362,7 +5416,11 @@ def sample_initialize_service(): client = backupdr_v1.BackupDRClient() # Initialize request argument(s) + cloud_sql_instance_initialization_config = backupdr_v1.CloudSqlInstanceInitializationConfig() + cloud_sql_instance_initialization_config.edition = "ENTERPRISE_PLUS" + request = backupdr_v1.InitializeServiceRequest( + cloud_sql_instance_initialization_config=cloud_sql_instance_initialization_config, name="name_value", resource_type="resource_type_value", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py index 1f4640cab0e5..0249a8066846 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/pagers.py @@ -43,6 +43,7 @@ backupplan, backupplanassociation, backupvault, + datasourcereference, ) @@ -982,6 +983,162 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class ListBackupPlanRevisionsPager: + """A pager for iterating through ``list_backup_plan_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanRevisionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plan_revisions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBackupPlanRevisions`` requests and continue to iterate + through the ``backup_plan_revisions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanRevisionsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., backupplan.ListBackupPlanRevisionsResponse], + request: backupplan.ListBackupPlanRevisionsRequest, + response: backupplan.ListBackupPlanRevisionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanRevisionsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanRevisionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backupplan.ListBackupPlanRevisionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[backupplan.ListBackupPlanRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplan.BackupPlanRevision]: + for page in self.pages: + yield from page.backup_plan_revisions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListBackupPlanRevisionsAsyncPager: + """A pager for iterating through ``list_backup_plan_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.ListBackupPlanRevisionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plan_revisions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBackupPlanRevisions`` requests and continue to iterate + through the ``backup_plan_revisions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.ListBackupPlanRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[backupplan.ListBackupPlanRevisionsResponse]], + request: backupplan.ListBackupPlanRevisionsRequest, + response: backupplan.ListBackupPlanRevisionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.ListBackupPlanRevisionsRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.ListBackupPlanRevisionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = backupplan.ListBackupPlanRevisionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[backupplan.ListBackupPlanRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplan.BackupPlanRevision]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plan_revisions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListBackupPlanAssociationsPager: """A pager for iterating through ``list_backup_plan_associations`` requests. @@ -1142,3 +1299,356 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchBackupPlanAssociationsForResourceTypePager: + """A pager for iterating through ``fetch_backup_plan_associations_for_resource_type`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeResponse` object, and + provides an ``__iter__`` method to iterate through its + ``backup_plan_associations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchBackupPlanAssociationsForResourceType`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + ], + request: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + response: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( + request + ) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[backupplanassociation.BackupPlanAssociation]: + for page in self.pages: + yield from page.backup_plan_associations + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchBackupPlanAssociationsForResourceTypeAsyncPager: + """A pager for iterating through ``fetch_backup_plan_associations_for_resource_type`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``backup_plan_associations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchBackupPlanAssociationsForResourceType`` requests and continue to iterate + through the ``backup_plan_associations`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse + ], + ], + request: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + response: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( + request + ) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[backupplanassociation.BackupPlanAssociation]: + async def async_generator(): + async for page in self.pages: + for response in page.backup_plan_associations: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchDataSourceReferencesForResourceTypePager: + """A pager for iterating through ``fetch_data_source_references_for_resource_type`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_source_references`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchDataSourceReferencesForResourceType`` requests and continue to iterate + through the ``data_source_references`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., datasourcereference.FetchDataSourceReferencesForResourceTypeResponse + ], + request: datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, + response: datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest(request) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[datasourcereference.FetchDataSourceReferencesForResourceTypeResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[datasourcereference.DataSourceReference]: + for page in self.pages: + yield from page.data_source_references + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchDataSourceReferencesForResourceTypeAsyncPager: + """A pager for iterating through ``fetch_data_source_references_for_resource_type`` requests. + + This class thinly wraps an initial + :class:`google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_source_references`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchDataSourceReferencesForResourceType`` requests and continue to iterate + through the ``data_source_references`` field on the + corresponding responses. + + All the usual :class:`google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., + Awaitable[ + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse + ], + ], + request: datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, + response: datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeRequest): + The initial request object. + response (google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest(request) + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[ + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse + ]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[datasourcereference.DataSourceReference]: + async def async_generator(): + async for page in self.pages: + for response in page.data_source_references: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py index 10252dfcf5e5..fc26866a98b3 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/base.py @@ -35,6 +35,7 @@ backupplan, backupplanassociation, backupvault, + datasourcereference, ) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -335,6 +336,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.update_backup_plan: gapic_v1.method.wrap_method( + self.update_backup_plan, + default_timeout=None, + client_info=client_info, + ), self.get_backup_plan: gapic_v1.method.wrap_method( self.get_backup_plan, 
default_timeout=None, @@ -350,11 +356,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_backup_plan_revision: gapic_v1.method.wrap_method( + self.get_backup_plan_revision, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_revisions: gapic_v1.method.wrap_method( + self.list_backup_plan_revisions, + default_timeout=None, + client_info=client_info, + ), self.create_backup_plan_association: gapic_v1.method.wrap_method( self.create_backup_plan_association, default_timeout=None, client_info=client_info, ), + self.update_backup_plan_association: gapic_v1.method.wrap_method( + self.update_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), self.get_backup_plan_association: gapic_v1.method.wrap_method( self.get_backup_plan_association, default_timeout=None, @@ -365,6 +386,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.fetch_backup_plan_associations_for_resource_type: gapic_v1.method.wrap_method( + self.fetch_backup_plan_associations_for_resource_type, + default_timeout=None, + client_info=client_info, + ), self.delete_backup_plan_association: gapic_v1.method.wrap_method( self.delete_backup_plan_association, default_timeout=None, @@ -375,6 +401,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_data_source_reference: gapic_v1.method.wrap_method( + self.get_data_source_reference, + default_timeout=None, + client_info=client_info, + ), + self.fetch_data_source_references_for_resource_type: gapic_v1.method.wrap_method( + self.fetch_data_source_references_for_resource_type, + default_timeout=None, + client_info=client_info, + ), self.initialize_service: gapic_v1.method.wrap_method( self.initialize_service, default_retry=retries.Retry( @@ -635,6 +671,15 @@ def create_backup_plan( ]: raise NotImplementedError() + @property + def 
update_backup_plan( + self, + ) -> Callable[ + [backupplan.UpdateBackupPlanRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_backup_plan( self, @@ -665,6 +710,27 @@ def delete_backup_plan( ]: raise NotImplementedError() + @property + def get_backup_plan_revision( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRevisionRequest], + Union[backupplan.BackupPlanRevision, Awaitable[backupplan.BackupPlanRevision]], + ]: + raise NotImplementedError() + + @property + def list_backup_plan_revisions( + self, + ) -> Callable[ + [backupplan.ListBackupPlanRevisionsRequest], + Union[ + backupplan.ListBackupPlanRevisionsResponse, + Awaitable[backupplan.ListBackupPlanRevisionsResponse], + ], + ]: + raise NotImplementedError() + @property def create_backup_plan_association( self, @@ -674,6 +740,15 @@ def create_backup_plan_association( ]: raise NotImplementedError() + @property + def update_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.UpdateBackupPlanAssociationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_backup_plan_association( self, @@ -698,6 +773,20 @@ def list_backup_plan_associations( ]: raise NotImplementedError() + @property + def fetch_backup_plan_associations_for_resource_type( + self, + ) -> Callable[ + [backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest], + Union[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + Awaitable[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse + ], + ], + ]: + raise NotImplementedError() + @property def delete_backup_plan_association( self, @@ -716,6 +805,32 @@ def trigger_backup( ]: raise NotImplementedError() + @property + def get_data_source_reference( + self, + ) -> Callable[ + [datasourcereference.GetDataSourceReferenceRequest], + Union[ + 
datasourcereference.DataSourceReference, + Awaitable[datasourcereference.DataSourceReference], + ], + ]: + raise NotImplementedError() + + @property + def fetch_data_source_references_for_resource_type( + self, + ) -> Callable[ + [datasourcereference.FetchDataSourceReferencesForResourceTypeRequest], + Union[ + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + Awaitable[ + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse + ], + ], + ]: + raise NotImplementedError() + @property def initialize_service( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py index 225f1db0417d..62dcf4675f12 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc.py @@ -37,6 +37,7 @@ backupplan, backupplanassociation, backupvault, + datasourcereference, ) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -855,6 +856,32 @@ def create_backup_plan( ) return self._stubs["create_backup_plan"] + @property + def update_backup_plan( + self, + ) -> Callable[[backupplan.UpdateBackupPlanRequest], operations_pb2.Operation]: + r"""Return a callable for the update backup plan method over gRPC. + + Update a BackupPlan. + + Returns: + Callable[[~.UpdateBackupPlanRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup_plan" not in self._stubs: + self._stubs["update_backup_plan"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupPlan", + request_serializer=backupplan.UpdateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_plan"] + @property def get_backup_plan( self, @@ -935,6 +962,66 @@ def delete_backup_plan( ) return self._stubs["delete_backup_plan"] + @property + def get_backup_plan_revision( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRevisionRequest], backupplan.BackupPlanRevision + ]: + r"""Return a callable for the get backup plan revision method over gRPC. + + Gets details of a single BackupPlanRevision. + + Returns: + Callable[[~.GetBackupPlanRevisionRequest], + ~.BackupPlanRevision]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_revision" not in self._stubs: + self._stubs["get_backup_plan_revision"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanRevision", + request_serializer=backupplan.GetBackupPlanRevisionRequest.serialize, + response_deserializer=backupplan.BackupPlanRevision.deserialize, + ) + return self._stubs["get_backup_plan_revision"] + + @property + def list_backup_plan_revisions( + self, + ) -> Callable[ + [backupplan.ListBackupPlanRevisionsRequest], + backupplan.ListBackupPlanRevisionsResponse, + ]: + r"""Return a callable for the list backup plan revisions method over gRPC. + + Lists BackupPlanRevisions in a given project and + location. + + Returns: + Callable[[~.ListBackupPlanRevisionsRequest], + ~.ListBackupPlanRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plan_revisions" not in self._stubs: + self._stubs[ + "list_backup_plan_revisions" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanRevisions", + request_serializer=backupplan.ListBackupPlanRevisionsRequest.serialize, + response_deserializer=backupplan.ListBackupPlanRevisionsResponse.deserialize, + ) + return self._stubs["list_backup_plan_revisions"] + @property def create_backup_plan_association( self, @@ -966,6 +1053,37 @@ def create_backup_plan_association( ) return self._stubs["create_backup_plan_association"] + @property + def update_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.UpdateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + r"""Return a callable for the update backup plan association method over gRPC. + + Update a BackupPlanAssociation. + + Returns: + Callable[[~.UpdateBackupPlanAssociationRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup_plan_association" not in self._stubs: + self._stubs[ + "update_backup_plan_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupPlanAssociation", + request_serializer=backupplanassociation.UpdateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_plan_association"] + @property def get_backup_plan_association( self, @@ -1029,6 +1147,39 @@ def list_backup_plan_associations( ) return self._stubs["list_backup_plan_associations"] + @property + def fetch_backup_plan_associations_for_resource_type( + self, + ) -> Callable[ + [backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest], + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + ]: + r"""Return a callable for the fetch backup plan associations + for resource type method over gRPC. + + List BackupPlanAssociations for a given resource + type. + + Returns: + Callable[[~.FetchBackupPlanAssociationsForResourceTypeRequest], + ~.FetchBackupPlanAssociationsForResourceTypeResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_backup_plan_associations_for_resource_type" not in self._stubs: + self._stubs[ + "fetch_backup_plan_associations_for_resource_type" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchBackupPlanAssociationsForResourceType", + request_serializer=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest.serialize, + response_deserializer=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.deserialize, + ) + return self._stubs["fetch_backup_plan_associations_for_resource_type"] + @property def delete_backup_plan_association( self, @@ -1088,6 +1239,68 @@ def trigger_backup( ) return self._stubs["trigger_backup"] + @property + def get_data_source_reference( + self, + ) -> Callable[ + [datasourcereference.GetDataSourceReferenceRequest], + datasourcereference.DataSourceReference, + ]: + r"""Return a callable for the get data source reference method over gRPC. + + Gets details of a single DataSourceReference. + + Returns: + Callable[[~.GetDataSourceReferenceRequest], + ~.DataSourceReference]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_data_source_reference" not in self._stubs: + self._stubs["get_data_source_reference"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSourceReference", + request_serializer=datasourcereference.GetDataSourceReferenceRequest.serialize, + response_deserializer=datasourcereference.DataSourceReference.deserialize, + ) + return self._stubs["get_data_source_reference"] + + @property + def fetch_data_source_references_for_resource_type( + self, + ) -> Callable[ + [datasourcereference.FetchDataSourceReferencesForResourceTypeRequest], + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + ]: + r"""Return a callable for the fetch data source references + for resource type method over gRPC. + + Fetch DataSourceReferences for a given project, + location and resource type. + + Returns: + Callable[[~.FetchDataSourceReferencesForResourceTypeRequest], + ~.FetchDataSourceReferencesForResourceTypeResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_data_source_references_for_resource_type" not in self._stubs: + self._stubs[ + "fetch_data_source_references_for_resource_type" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchDataSourceReferencesForResourceType", + request_serializer=datasourcereference.FetchDataSourceReferencesForResourceTypeRequest.serialize, + response_deserializer=datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.deserialize, + ) + return self._stubs["fetch_data_source_references_for_resource_type"] + @property def initialize_service( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py index 368d5f575f5b..589041037e46 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/grpc_asyncio.py @@ -40,6 +40,7 @@ backupplan, backupplanassociation, backupvault, + datasourcereference, ) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -894,6 +895,34 @@ def create_backup_plan( ) return self._stubs["create_backup_plan"] + @property + def update_backup_plan( + self, + ) -> Callable[ + [backupplan.UpdateBackupPlanRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update backup plan method over gRPC. + + Update a BackupPlan. + + Returns: + Callable[[~.UpdateBackupPlanRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup_plan" not in self._stubs: + self._stubs["update_backup_plan"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupPlan", + request_serializer=backupplan.UpdateBackupPlanRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_plan"] + @property def get_backup_plan( self, @@ -977,6 +1006,67 @@ def delete_backup_plan( ) return self._stubs["delete_backup_plan"] + @property + def get_backup_plan_revision( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRevisionRequest], + Awaitable[backupplan.BackupPlanRevision], + ]: + r"""Return a callable for the get backup plan revision method over gRPC. + + Gets details of a single BackupPlanRevision. + + Returns: + Callable[[~.GetBackupPlanRevisionRequest], + Awaitable[~.BackupPlanRevision]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_backup_plan_revision" not in self._stubs: + self._stubs["get_backup_plan_revision"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetBackupPlanRevision", + request_serializer=backupplan.GetBackupPlanRevisionRequest.serialize, + response_deserializer=backupplan.BackupPlanRevision.deserialize, + ) + return self._stubs["get_backup_plan_revision"] + + @property + def list_backup_plan_revisions( + self, + ) -> Callable[ + [backupplan.ListBackupPlanRevisionsRequest], + Awaitable[backupplan.ListBackupPlanRevisionsResponse], + ]: + r"""Return a callable for the list backup plan revisions method over gRPC. + + Lists BackupPlanRevisions in a given project and + location. 
+ + Returns: + Callable[[~.ListBackupPlanRevisionsRequest], + Awaitable[~.ListBackupPlanRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_backup_plan_revisions" not in self._stubs: + self._stubs[ + "list_backup_plan_revisions" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/ListBackupPlanRevisions", + request_serializer=backupplan.ListBackupPlanRevisionsRequest.serialize, + response_deserializer=backupplan.ListBackupPlanRevisionsResponse.deserialize, + ) + return self._stubs["list_backup_plan_revisions"] + @property def create_backup_plan_association( self, @@ -1008,6 +1098,37 @@ def create_backup_plan_association( ) return self._stubs["create_backup_plan_association"] + @property + def update_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.UpdateBackupPlanAssociationRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update backup plan association method over gRPC. + + Update a BackupPlanAssociation. + + Returns: + Callable[[~.UpdateBackupPlanAssociationRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_backup_plan_association" not in self._stubs: + self._stubs[ + "update_backup_plan_association" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/UpdateBackupPlanAssociation", + request_serializer=backupplanassociation.UpdateBackupPlanAssociationRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_backup_plan_association"] + @property def get_backup_plan_association( self, @@ -1071,6 +1192,41 @@ def list_backup_plan_associations( ) return self._stubs["list_backup_plan_associations"] + @property + def fetch_backup_plan_associations_for_resource_type( + self, + ) -> Callable[ + [backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest], + Awaitable[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse + ], + ]: + r"""Return a callable for the fetch backup plan associations + for resource type method over gRPC. + + List BackupPlanAssociations for a given resource + type. + + Returns: + Callable[[~.FetchBackupPlanAssociationsForResourceTypeRequest], + Awaitable[~.FetchBackupPlanAssociationsForResourceTypeResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_backup_plan_associations_for_resource_type" not in self._stubs: + self._stubs[ + "fetch_backup_plan_associations_for_resource_type" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchBackupPlanAssociationsForResourceType", + request_serializer=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest.serialize, + response_deserializer=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.deserialize, + ) + return self._stubs["fetch_backup_plan_associations_for_resource_type"] + @property def delete_backup_plan_association( self, @@ -1131,6 +1287,68 @@ def trigger_backup( ) return self._stubs["trigger_backup"] + @property + def get_data_source_reference( + self, + ) -> Callable[ + [datasourcereference.GetDataSourceReferenceRequest], + Awaitable[datasourcereference.DataSourceReference], + ]: + r"""Return a callable for the get data source reference method over gRPC. + + Gets details of a single DataSourceReference. + + Returns: + Callable[[~.GetDataSourceReferenceRequest], + Awaitable[~.DataSourceReference]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_data_source_reference" not in self._stubs: + self._stubs["get_data_source_reference"] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/GetDataSourceReference", + request_serializer=datasourcereference.GetDataSourceReferenceRequest.serialize, + response_deserializer=datasourcereference.DataSourceReference.deserialize, + ) + return self._stubs["get_data_source_reference"] + + @property + def fetch_data_source_references_for_resource_type( + self, + ) -> Callable[ + [datasourcereference.FetchDataSourceReferencesForResourceTypeRequest], + Awaitable[datasourcereference.FetchDataSourceReferencesForResourceTypeResponse], + ]: + r"""Return a callable for the fetch data source references + for resource type method over gRPC. + + Fetch DataSourceReferences for a given project, + location and resource type. + + Returns: + Callable[[~.FetchDataSourceReferencesForResourceTypeRequest], + Awaitable[~.FetchDataSourceReferencesForResourceTypeResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "fetch_data_source_references_for_resource_type" not in self._stubs: + self._stubs[ + "fetch_data_source_references_for_resource_type" + ] = self._logged_channel.unary_unary( + "/google.cloud.backupdr.v1.BackupDR/FetchDataSourceReferencesForResourceType", + request_serializer=datasourcereference.FetchDataSourceReferencesForResourceTypeRequest.serialize, + response_deserializer=datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.deserialize, + ) + return self._stubs["fetch_data_source_references_for_resource_type"] + @property def initialize_service( self, @@ -1356,6 +1574,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.update_backup_plan: self._wrap_method( + self.update_backup_plan, + default_timeout=None, + client_info=client_info, + ), self.get_backup_plan: self._wrap_method( self.get_backup_plan, default_timeout=None, @@ -1371,11 +1594,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_backup_plan_revision: self._wrap_method( + self.get_backup_plan_revision, + default_timeout=None, + client_info=client_info, + ), + self.list_backup_plan_revisions: self._wrap_method( + self.list_backup_plan_revisions, + default_timeout=None, + client_info=client_info, + ), self.create_backup_plan_association: self._wrap_method( self.create_backup_plan_association, default_timeout=None, client_info=client_info, ), + self.update_backup_plan_association: self._wrap_method( + self.update_backup_plan_association, + default_timeout=None, + client_info=client_info, + ), self.get_backup_plan_association: self._wrap_method( self.get_backup_plan_association, default_timeout=None, @@ -1386,6 +1624,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.fetch_backup_plan_associations_for_resource_type: self._wrap_method( + self.fetch_backup_plan_associations_for_resource_type, + 
default_timeout=None, + client_info=client_info, + ), self.delete_backup_plan_association: self._wrap_method( self.delete_backup_plan_association, default_timeout=None, @@ -1396,6 +1639,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_data_source_reference: self._wrap_method( + self.get_data_source_reference, + default_timeout=None, + client_info=client_info, + ), + self.fetch_data_source_references_for_resource_type: self._wrap_method( + self.fetch_data_source_references_for_resource_type, + default_timeout=None, + client_info=client_info, + ), self.initialize_service: self._wrap_method( self.initialize_service, default_retry=retries.AsyncRetry( diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py index 8816d08eabc0..a2d7b4ebf7b6 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest.py @@ -37,6 +37,7 @@ backupplan, backupplanassociation, backupvault, + datasourcereference, ) from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO @@ -153,6 +154,22 @@ def post_delete_management_server(self, response): logging.log(f"Received response: {response}") return response + def pre_fetch_backup_plan_associations_for_resource_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_backup_plan_associations_for_resource_type(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_fetch_data_source_references_for_resource_type(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_fetch_data_source_references_for_resource_type(self, response): + 
logging.log(f"Received response: {response}") + return response + def pre_fetch_usable_backup_vaults(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -185,6 +202,14 @@ def post_get_backup_plan_association(self, response): logging.log(f"Received response: {response}") return response + def pre_get_backup_plan_revision(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_backup_plan_revision(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_backup_vault(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -201,6 +226,14 @@ def post_get_data_source(self, response): logging.log(f"Received response: {response}") return response + def pre_get_data_source_reference(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_source_reference(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_management_server(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -225,6 +258,14 @@ def post_list_backup_plan_associations(self, response): logging.log(f"Received response: {response}") return response + def pre_list_backup_plan_revisions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_backup_plan_revisions(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_backup_plans(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -289,6 +330,22 @@ def post_update_backup(self, response): logging.log(f"Received response: {response}") return response + def pre_update_backup_plan(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_update_backup_plan(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_backup_plan_association(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_backup_plan_association(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_backup_vault(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -745,6 +802,112 @@ def post_delete_management_server_with_metadata( """ return response, metadata + def pre_fetch_backup_plan_associations_for_resource_type( + self, + request: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for fetch_backup_plan_associations_for_resource_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_fetch_backup_plan_associations_for_resource_type( + self, + response: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + ) -> backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse: + """Post-rpc interceptor for fetch_backup_plan_associations_for_resource_type + + DEPRECATED. Please use the `post_fetch_backup_plan_associations_for_resource_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_fetch_backup_plan_associations_for_resource_type` interceptor runs + before the `post_fetch_backup_plan_associations_for_resource_type_with_metadata` interceptor. 
+ """ + return response + + def post_fetch_backup_plan_associations_for_resource_type_with_metadata( + self, + response: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_backup_plan_associations_for_resource_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_fetch_backup_plan_associations_for_resource_type_with_metadata` + interceptor in new development instead of the `post_fetch_backup_plan_associations_for_resource_type` interceptor. + When both interceptors are used, this `post_fetch_backup_plan_associations_for_resource_type_with_metadata` interceptor runs after the + `post_fetch_backup_plan_associations_for_resource_type` interceptor. The (possibly modified) response returned by + `post_fetch_backup_plan_associations_for_resource_type` will be passed to + `post_fetch_backup_plan_associations_for_resource_type_with_metadata`. + """ + return response, metadata + + def pre_fetch_data_source_references_for_resource_type( + self, + request: datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for fetch_data_source_references_for_resource_type + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. 
+ """ + return request, metadata + + def post_fetch_data_source_references_for_resource_type( + self, + response: datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + ) -> datasourcereference.FetchDataSourceReferencesForResourceTypeResponse: + """Post-rpc interceptor for fetch_data_source_references_for_resource_type + + DEPRECATED. Please use the `post_fetch_data_source_references_for_resource_type_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_fetch_data_source_references_for_resource_type` interceptor runs + before the `post_fetch_data_source_references_for_resource_type_with_metadata` interceptor. + """ + return response + + def post_fetch_data_source_references_for_resource_type_with_metadata( + self, + response: datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for fetch_data_source_references_for_resource_type + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_fetch_data_source_references_for_resource_type_with_metadata` + interceptor in new development instead of the `post_fetch_data_source_references_for_resource_type` interceptor. + When both interceptors are used, this `post_fetch_data_source_references_for_resource_type_with_metadata` interceptor runs after the + `post_fetch_data_source_references_for_resource_type` interceptor. 
The (possibly modified) response returned by + `post_fetch_data_source_references_for_resource_type` will be passed to + `post_fetch_data_source_references_for_resource_type_with_metadata`. + """ + return response, metadata + def pre_fetch_usable_backup_vaults( self, request: backupvault.FetchUsableBackupVaultsRequest, @@ -941,6 +1104,54 @@ def post_get_backup_plan_association_with_metadata( """ return response, metadata + def pre_get_backup_plan_revision( + self, + request: backupplan.GetBackupPlanRevisionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplan.GetBackupPlanRevisionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_backup_plan_revision + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_backup_plan_revision( + self, response: backupplan.BackupPlanRevision + ) -> backupplan.BackupPlanRevision: + """Post-rpc interceptor for get_backup_plan_revision + + DEPRECATED. Please use the `post_get_backup_plan_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_get_backup_plan_revision` interceptor runs + before the `post_get_backup_plan_revision_with_metadata` interceptor. + """ + return response + + def post_get_backup_plan_revision_with_metadata( + self, + response: backupplan.BackupPlanRevision, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[backupplan.BackupPlanRevision, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_backup_plan_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. 
+ + We recommend only using this `post_get_backup_plan_revision_with_metadata` + interceptor in new development instead of the `post_get_backup_plan_revision` interceptor. + When both interceptors are used, this `post_get_backup_plan_revision_with_metadata` interceptor runs after the + `post_get_backup_plan_revision` interceptor. The (possibly modified) response returned by + `post_get_backup_plan_revision` will be passed to + `post_get_backup_plan_revision_with_metadata`. + """ + return response, metadata + def pre_get_backup_vault( self, request: backupvault.GetBackupVaultRequest, @@ -1037,6 +1248,57 @@ def post_get_data_source_with_metadata( """ return response, metadata + def pre_get_data_source_reference( + self, + request: datasourcereference.GetDataSourceReferenceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datasourcereference.GetDataSourceReferenceRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_data_source_reference + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_data_source_reference( + self, response: datasourcereference.DataSourceReference + ) -> datasourcereference.DataSourceReference: + """Post-rpc interceptor for get_data_source_reference + + DEPRECATED. Please use the `post_get_data_source_reference_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_get_data_source_reference` interceptor runs + before the `post_get_data_source_reference_with_metadata` interceptor. 
+ """ + return response + + def post_get_data_source_reference_with_metadata( + self, + response: datasourcereference.DataSourceReference, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + datasourcereference.DataSourceReference, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_data_source_reference + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_get_data_source_reference_with_metadata` + interceptor in new development instead of the `post_get_data_source_reference` interceptor. + When both interceptors are used, this `post_get_data_source_reference_with_metadata` interceptor runs after the + `post_get_data_source_reference` interceptor. The (possibly modified) response returned by + `post_get_data_source_reference` will be passed to + `post_get_data_source_reference_with_metadata`. + """ + return response, metadata + def pre_get_management_server( self, request: backupdr.GetManagementServerRequest, @@ -1185,6 +1447,58 @@ def post_list_backup_plan_associations_with_metadata( """ return response, metadata + def pre_list_backup_plan_revisions( + self, + request: backupplan.ListBackupPlanRevisionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplan.ListBackupPlanRevisionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_backup_plan_revisions + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_list_backup_plan_revisions( + self, response: backupplan.ListBackupPlanRevisionsResponse + ) -> backupplan.ListBackupPlanRevisionsResponse: + """Post-rpc interceptor for list_backup_plan_revisions + + DEPRECATED. Please use the `post_list_backup_plan_revisions_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_list_backup_plan_revisions` interceptor runs + before the `post_list_backup_plan_revisions_with_metadata` interceptor. + """ + return response + + def post_list_backup_plan_revisions_with_metadata( + self, + response: backupplan.ListBackupPlanRevisionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupplan.ListBackupPlanRevisionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_backup_plan_revisions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. + + We recommend only using this `post_list_backup_plan_revisions_with_metadata` + interceptor in new development instead of the `post_list_backup_plan_revisions` interceptor. + When both interceptors are used, this `post_list_backup_plan_revisions_with_metadata` interceptor runs after the + `post_list_backup_plan_revisions` interceptor. The (possibly modified) response returned by + `post_list_backup_plan_revisions` will be passed to + `post_list_backup_plan_revisions_with_metadata`. 
+ """ + return response, metadata + def pre_list_backup_plans( self, request: backupplan.ListBackupPlansRequest, @@ -1578,130 +1892,227 @@ def post_update_backup_with_metadata( """ return response, metadata - def pre_update_backup_vault( + def pre_update_backup_plan( self, - request: backupvault.UpdateBackupVaultRequest, + request: backupplan.UpdateBackupPlanRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - backupvault.UpdateBackupVaultRequest, Sequence[Tuple[str, Union[str, bytes]]] + backupplan.UpdateBackupPlanRequest, Sequence[Tuple[str, Union[str, bytes]]] ]: - """Pre-rpc interceptor for update_backup_vault + """Pre-rpc interceptor for update_backup_plan Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_update_backup_vault( + def post_update_backup_plan( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_backup_vault + """Post-rpc interceptor for update_backup_plan - DEPRECATED. Please use the `post_update_backup_vault_with_metadata` + DEPRECATED. Please use the `post_update_backup_plan_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. This `post_update_backup_vault` interceptor runs - before the `post_update_backup_vault_with_metadata` interceptor. + it is returned to user code. This `post_update_backup_plan` interceptor runs + before the `post_update_backup_plan_with_metadata` interceptor. 
""" return response - def post_update_backup_vault_with_metadata( + def post_update_backup_plan_with_metadata( self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_backup_vault + """Post-rpc interceptor for update_backup_plan Override in a subclass to read or manipulate the response or metadata after it is returned by the BackupDR server but before it is returned to user code. - We recommend only using this `post_update_backup_vault_with_metadata` - interceptor in new development instead of the `post_update_backup_vault` interceptor. - When both interceptors are used, this `post_update_backup_vault_with_metadata` interceptor runs after the - `post_update_backup_vault` interceptor. The (possibly modified) response returned by - `post_update_backup_vault` will be passed to - `post_update_backup_vault_with_metadata`. + We recommend only using this `post_update_backup_plan_with_metadata` + interceptor in new development instead of the `post_update_backup_plan` interceptor. + When both interceptors are used, this `post_update_backup_plan_with_metadata` interceptor runs after the + `post_update_backup_plan` interceptor. The (possibly modified) response returned by + `post_update_backup_plan` will be passed to + `post_update_backup_plan_with_metadata`. 
""" return response, metadata - def pre_update_data_source( + def pre_update_backup_plan_association( self, - request: backupvault.UpdateDataSourceRequest, + request: backupplanassociation.UpdateBackupPlanAssociationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - backupvault.UpdateDataSourceRequest, Sequence[Tuple[str, Union[str, bytes]]] + backupplanassociation.UpdateBackupPlanAssociationRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: - """Pre-rpc interceptor for update_data_source + """Pre-rpc interceptor for update_backup_plan_association Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_update_data_source( + def post_update_backup_plan_association( self, response: operations_pb2.Operation ) -> operations_pb2.Operation: - """Post-rpc interceptor for update_data_source + """Post-rpc interceptor for update_backup_plan_association - DEPRECATED. Please use the `post_update_data_source_with_metadata` + DEPRECATED. Please use the `post_update_backup_plan_association_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. This `post_update_data_source` interceptor runs - before the `post_update_data_source_with_metadata` interceptor. + it is returned to user code. This `post_update_backup_plan_association` interceptor runs + before the `post_update_backup_plan_association_with_metadata` interceptor. 
""" return response - def post_update_data_source_with_metadata( + def post_update_backup_plan_association_with_metadata( self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for update_data_source + """Post-rpc interceptor for update_backup_plan_association Override in a subclass to read or manipulate the response or metadata after it is returned by the BackupDR server but before it is returned to user code. - We recommend only using this `post_update_data_source_with_metadata` - interceptor in new development instead of the `post_update_data_source` interceptor. - When both interceptors are used, this `post_update_data_source_with_metadata` interceptor runs after the - `post_update_data_source` interceptor. The (possibly modified) response returned by - `post_update_data_source` will be passed to - `post_update_data_source_with_metadata`. + We recommend only using this `post_update_backup_plan_association_with_metadata` + interceptor in new development instead of the `post_update_backup_plan_association` interceptor. + When both interceptors are used, this `post_update_backup_plan_association_with_metadata` interceptor runs after the + `post_update_backup_plan_association` interceptor. The (possibly modified) response returned by + `post_update_backup_plan_association` will be passed to + `post_update_backup_plan_association_with_metadata`. 
""" return response, metadata - def pre_get_location( + def pre_update_backup_vault( self, - request: locations_pb2.GetLocationRequest, + request: backupvault.UpdateBackupVaultRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + backupvault.UpdateBackupVaultRequest, Sequence[Tuple[str, Union[str, bytes]]] ]: - """Pre-rpc interceptor for get_location + """Pre-rpc interceptor for update_backup_vault Override in a subclass to manipulate the request or metadata before they are sent to the BackupDR server. """ return request, metadata - def post_get_location( - self, response: locations_pb2.Location - ) -> locations_pb2.Location: - """Post-rpc interceptor for get_location + def post_update_backup_vault( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_backup_vault - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_backup_vault_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the BackupDR server but before - it is returned to user code. + it is returned to user code. This `post_update_backup_vault` interceptor runs + before the `post_update_backup_vault_with_metadata` interceptor. """ return response - def pre_list_locations( + def post_update_backup_vault_with_metadata( self, - request: locations_pb2.ListLocationsRequest, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_backup_vault + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. 
+ + We recommend only using this `post_update_backup_vault_with_metadata` + interceptor in new development instead of the `post_update_backup_vault` interceptor. + When both interceptors are used, this `post_update_backup_vault_with_metadata` interceptor runs after the + `post_update_backup_vault` interceptor. The (possibly modified) response returned by + `post_update_backup_vault` will be passed to + `post_update_backup_vault_with_metadata`. + """ + return response, metadata + + def pre_update_data_source( + self, + request: backupvault.UpdateDataSourceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + backupvault.UpdateDataSourceRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_data_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_update_data_source( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_data_source + + DEPRECATED. Please use the `post_update_data_source_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. This `post_update_data_source` interceptor runs + before the `post_update_data_source_with_metadata` interceptor. + """ + return response + + def post_update_data_source_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_source + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackupDR server but before it is returned to user code. 
+ + We recommend only using this `post_update_data_source_with_metadata` + interceptor in new development instead of the `post_update_data_source` interceptor. + When both interceptors are used, this `post_update_data_source_with_metadata` interceptor runs after the + `post_update_data_source` interceptor. The (possibly modified) response returned by + `post_update_data_source` will be passed to + `post_update_data_source_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackupDR server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BackupDR server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] @@ -3398,11 +3809,14 @@ def __call__( ) return resp - class _FetchUsableBackupVaults( - _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults, BackupDRRestStub + class _FetchBackupPlanAssociationsForResourceType( + _BaseBackupDRRestTransport._BaseFetchBackupPlanAssociationsForResourceType, + BackupDRRestStub, ): def __hash__(self): - return hash("BackupDRRestTransport.FetchUsableBackupVaults") + return hash( + "BackupDRRestTransport.FetchBackupPlanAssociationsForResourceType" + ) @staticmethod def _get_response( @@ -3428,19 +3842,20 @@ def _get_response( def __call__( self, - request: backupvault.FetchUsableBackupVaultsRequest, + request: backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.FetchUsableBackupVaultsResponse: - r"""Call the fetch usable backup - vaults method over HTTP. + ) -> backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse: + r"""Call the fetch backup plan + associations for resource type method over HTTP. Args: - request (~.backupvault.FetchUsableBackupVaultsRequest): - The request object. Request message for fetching usable - BackupVaults. + request (~.backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest): + The request object. Request for the + FetchBackupPlanAssociationsForResourceType + method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3450,25 +3865,29 @@ def __call__( be of type `bytes`. 
Returns: - ~.backupvault.FetchUsableBackupVaultsResponse: - Response message for fetching usable - BackupVaults. + ~.backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse: + Response for the + FetchBackupPlanAssociationsForResourceType + method. """ http_options = ( - _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_http_options() + _BaseBackupDRRestTransport._BaseFetchBackupPlanAssociationsForResourceType._get_http_options() ) - request, metadata = self._interceptor.pre_fetch_usable_backup_vaults( + ( + request, + metadata, + ) = self._interceptor.pre_fetch_backup_plan_associations_for_resource_type( request, metadata ) - transcoded_request = _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_transcoded_request( + transcoded_request = _BaseBackupDRRestTransport._BaseFetchBackupPlanAssociationsForResourceType._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_query_params_json( + query_params = _BaseBackupDRRestTransport._BaseFetchBackupPlanAssociationsForResourceType._get_query_params_json( transcoded_request ) @@ -3490,17 +3909,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.FetchUsableBackupVaults", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.FetchBackupPlanAssociationsForResourceType", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "FetchUsableBackupVaults", + "rpcName": "FetchBackupPlanAssociationsForResourceType", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._FetchUsableBackupVaults._get_response( + response = BackupDRRestTransport._FetchBackupPlanAssociationsForResourceType._get_response( self._host, metadata, query_params, @@ -3515,22 +3934,33 @@ def __call__( raise core_exceptions.from_http_response(response) # 
Return the response - resp = backupvault.FetchUsableBackupVaultsResponse() - pb_resp = backupvault.FetchUsableBackupVaultsResponse.pb(resp) + resp = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + pb_resp = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.pb( + resp + ) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_fetch_usable_backup_vaults(resp) + resp = ( + self._interceptor.post_fetch_backup_plan_associations_for_resource_type( + resp + ) + ) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_fetch_usable_backup_vaults_with_metadata( + ( + resp, + _, + ) = self._interceptor.post_fetch_backup_plan_associations_for_resource_type_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = ( - backupvault.FetchUsableBackupVaultsResponse.to_json(response) + response_payload = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.to_json( + response ) except: response_payload = None @@ -3540,19 +3970,24 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", + "Received response for google.cloud.backupdr_v1.BackupDRClient.fetch_backup_plan_associations_for_resource_type", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "FetchUsableBackupVaults", + "rpcName": "FetchBackupPlanAssociationsForResourceType", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetBackup(_BaseBackupDRRestTransport._BaseGetBackup, BackupDRRestStub): + class _FetchDataSourceReferencesForResourceType( + _BaseBackupDRRestTransport._BaseFetchDataSourceReferencesForResourceType, + BackupDRRestStub, + ): def __hash__(self): - return 
hash("BackupDRRestTransport.GetBackup") + return hash( + "BackupDRRestTransport.FetchDataSourceReferencesForResourceType" + ) @staticmethod def _get_response( @@ -3578,44 +4013,53 @@ def _get_response( def __call__( self, - request: backupvault.GetBackupRequest, + request: datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.Backup: - r"""Call the get backup method over HTTP. + ) -> datasourcereference.FetchDataSourceReferencesForResourceTypeResponse: + r"""Call the fetch data source + references for resource type method over HTTP. - Args: - request (~.backupvault.GetBackupRequest): - The request object. Request message for getting a Backup. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.datasourcereference.FetchDataSourceReferencesForResourceTypeRequest): + The request object. Request for the + FetchDataSourceReferencesForResourceType + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.datasourcereference.FetchDataSourceReferencesForResourceTypeResponse: + Response for the + FetchDataSourceReferencesForResourceType + method. - Returns: - ~.backupvault.Backup: - Message describing a Backup object. """ - http_options = _BaseBackupDRRestTransport._BaseGetBackup._get_http_options() + http_options = ( + _BaseBackupDRRestTransport._BaseFetchDataSourceReferencesForResourceType._get_http_options() + ) - request, metadata = self._interceptor.pre_get_backup(request, metadata) - transcoded_request = ( - _BaseBackupDRRestTransport._BaseGetBackup._get_transcoded_request( - http_options, request - ) + ( + request, + metadata, + ) = self._interceptor.pre_fetch_data_source_references_for_resource_type( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseFetchDataSourceReferencesForResourceType._get_transcoded_request( + http_options, request ) # Jsonify the query params - query_params = ( - _BaseBackupDRRestTransport._BaseGetBackup._get_query_params_json( - transcoded_request - ) + query_params = _BaseBackupDRRestTransport._BaseFetchDataSourceReferencesForResourceType._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -3636,17 +4080,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackup", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.FetchDataSourceReferencesForResourceType", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackup", + "rpcName": "FetchDataSourceReferencesForResourceType", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._GetBackup._get_response( + response = BackupDRRestTransport._FetchDataSourceReferencesForResourceType._get_response( self._host, metadata, query_params, @@ -3661,21 +4105,36 @@ def __call__( raise 
core_exceptions.from_http_response(response) # Return the response - resp = backupvault.Backup() - pb_resp = backupvault.Backup.pb(resp) + resp = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + pb_resp = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.pb( + resp + ) + ) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_backup(resp) + resp = ( + self._interceptor.post_fetch_data_source_references_for_resource_type( + resp + ) + ) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_with_metadata( + ( + resp, + _, + ) = self._interceptor.post_fetch_data_source_references_for_resource_type_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupvault.Backup.to_json(response) + response_payload = datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.to_json( + response + ) except: response_payload = None http_response = { @@ -3684,21 +4143,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup", + "Received response for google.cloud.backupdr_v1.BackupDRClient.fetch_data_source_references_for_resource_type", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackup", + "rpcName": "FetchDataSourceReferencesForResourceType", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetBackupPlan( - _BaseBackupDRRestTransport._BaseGetBackupPlan, BackupDRRestStub + class _FetchUsableBackupVaults( + _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.GetBackupPlan") + return hash("BackupDRRestTransport.FetchUsableBackupVaults") @staticmethod def 
_get_response( @@ -3724,51 +4183,48 @@ def _get_response( def __call__( self, - request: backupplan.GetBackupPlanRequest, + request: backupvault.FetchUsableBackupVaultsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplan.BackupPlan: - r"""Call the get backup plan method over HTTP. + ) -> backupvault.FetchUsableBackupVaultsResponse: + r"""Call the fetch usable backup + vaults method over HTTP. - Args: - request (~.backupplan.GetBackupPlanRequest): - The request object. The request message for getting a ``BackupPlan``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.backupvault.FetchUsableBackupVaultsRequest): + The request object. Request message for fetching usable + BackupVaults. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.backupplan.BackupPlan: - A ``BackupPlan`` specifies some common fields, such as - ``description`` as well as one or more ``BackupRule`` - messages. Each ``BackupRule`` has a retention policy and - defines a schedule by which the system is to perform - backup workloads. 
+ Returns: + ~.backupvault.FetchUsableBackupVaultsResponse: + Response message for fetching usable + BackupVaults. """ http_options = ( - _BaseBackupDRRestTransport._BaseGetBackupPlan._get_http_options() + _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_http_options() ) - request, metadata = self._interceptor.pre_get_backup_plan(request, metadata) - transcoded_request = ( - _BaseBackupDRRestTransport._BaseGetBackupPlan._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_fetch_usable_backup_vaults( + request, metadata ) - - # Jsonify the query params - query_params = ( - _BaseBackupDRRestTransport._BaseGetBackupPlan._get_query_params_json( - transcoded_request - ) + transcoded_request = _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseFetchUsableBackupVaults._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -3789,17 +4245,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupPlan", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.FetchUsableBackupVaults", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlan", + "rpcName": "FetchUsableBackupVaults", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._GetBackupPlan._get_response( + response = BackupDRRestTransport._FetchUsableBackupVaults._get_response( self._host, metadata, query_params, @@ -3814,21 +4270,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupplan.BackupPlan() - pb_resp = backupplan.BackupPlan.pb(resp) + resp = backupvault.FetchUsableBackupVaultsResponse() + pb_resp = 
backupvault.FetchUsableBackupVaultsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_backup_plan(resp) + resp = self._interceptor.post_fetch_usable_backup_vaults(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_plan_with_metadata( + resp, _ = self._interceptor.post_fetch_usable_backup_vaults_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupplan.BackupPlan.to_json(response) + response_payload = ( + backupvault.FetchUsableBackupVaultsResponse.to_json(response) + ) except: response_payload = None http_response = { @@ -3837,21 +4295,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", + "Received response for google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlan", + "rpcName": "FetchUsableBackupVaults", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetBackupPlanAssociation( - _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation, BackupDRRestStub - ): + class _GetBackup(_BaseBackupDRRestTransport._BaseGetBackup, BackupDRRestStub): def __hash__(self): - return hash("BackupDRRestTransport.GetBackupPlanAssociation") + return hash("BackupDRRestTransport.GetBackup") @staticmethod def _get_response( @@ -3877,50 +4333,44 @@ def _get_response( def __call__( self, - request: backupplanassociation.GetBackupPlanAssociationRequest, + request: backupvault.GetBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplanassociation.BackupPlanAssociation: - r"""Call 
the get backup plan - association method over HTTP. - - Args: - request (~.backupplanassociation.GetBackupPlanAssociationRequest): - The request object. Request message for getting a - BackupPlanAssociation resource. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + ) -> backupvault.Backup: + r"""Call the get backup method over HTTP. - Returns: - ~.backupplanassociation.BackupPlanAssociation: - A BackupPlanAssociation represents a - single BackupPlanAssociation which - contains details like workload, backup - plan etc + Args: + request (~.backupvault.GetBackupRequest): + The request object. Request message for getting a Backup. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.backupvault.Backup: + Message describing a Backup object. 
""" - http_options = ( - _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_http_options() - ) + http_options = _BaseBackupDRRestTransport._BaseGetBackup._get_http_options() - request, metadata = self._interceptor.pre_get_backup_plan_association( - request, metadata - ) - transcoded_request = _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_transcoded_request( - http_options, request + request, metadata = self._interceptor.pre_get_backup(request, metadata) + transcoded_request = ( + _BaseBackupDRRestTransport._BaseGetBackup._get_transcoded_request( + http_options, request + ) ) # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_query_params_json( - transcoded_request + query_params = ( + _BaseBackupDRRestTransport._BaseGetBackup._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -3941,17 +4391,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupPlanAssociation", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackup", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlanAssociation", + "rpcName": "GetBackup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._GetBackupPlanAssociation._get_response( + response = BackupDRRestTransport._GetBackup._get_response( self._host, metadata, query_params, @@ -3966,23 +4416,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupplanassociation.BackupPlanAssociation() - pb_resp = backupplanassociation.BackupPlanAssociation.pb(resp) + resp = backupvault.Backup() + pb_resp = backupvault.Backup.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = 
self._interceptor.post_get_backup_plan_association(resp) + resp = self._interceptor.post_get_backup(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_plan_association_with_metadata( + resp, _ = self._interceptor.post_get_backup_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = ( - backupplanassociation.BackupPlanAssociation.to_json(response) - ) + response_payload = backupvault.Backup.to_json(response) except: response_payload = None http_response = { @@ -3991,21 +4439,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupPlanAssociation", + "rpcName": "GetBackup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetBackupVault( - _BaseBackupDRRestTransport._BaseGetBackupVault, BackupDRRestStub + class _GetBackupPlan( + _BaseBackupDRRestTransport._BaseGetBackupPlan, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.GetBackupVault") + return hash("BackupDRRestTransport.GetBackupPlan") @staticmethod def _get_response( @@ -4031,18 +4479,17 @@ def _get_response( def __call__( self, - request: backupvault.GetBackupVaultRequest, + request: backupplan.GetBackupPlanRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.BackupVault: - r"""Call the get backup vault method over HTTP. + ) -> backupplan.BackupPlan: + r"""Call the get backup plan method over HTTP. Args: - request (~.backupvault.GetBackupVaultRequest): - The request object. 
Request message for getting a - BackupVault. + request (~.backupplan.GetBackupPlanRequest): + The request object. The request message for getting a ``BackupPlan``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4052,28 +4499,29 @@ def __call__( be of type `bytes`. Returns: - ~.backupvault.BackupVault: - Message describing a BackupVault - object. + ~.backupplan.BackupPlan: + A ``BackupPlan`` specifies some common fields, such as + ``description`` as well as one or more ``BackupRule`` + messages. Each ``BackupRule`` has a retention policy and + defines a schedule by which the system is to perform + backup workloads. """ http_options = ( - _BaseBackupDRRestTransport._BaseGetBackupVault._get_http_options() + _BaseBackupDRRestTransport._BaseGetBackupPlan._get_http_options() ) - request, metadata = self._interceptor.pre_get_backup_vault( - request, metadata - ) + request, metadata = self._interceptor.pre_get_backup_plan(request, metadata) transcoded_request = ( - _BaseBackupDRRestTransport._BaseGetBackupVault._get_transcoded_request( + _BaseBackupDRRestTransport._BaseGetBackupPlan._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseBackupDRRestTransport._BaseGetBackupVault._get_query_params_json( + _BaseBackupDRRestTransport._BaseGetBackupPlan._get_query_params_json( transcoded_request ) ) @@ -4096,17 +4544,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupVault", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupPlan", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupVault", + "rpcName": "GetBackupPlan", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._GetBackupVault._get_response( + response = 
BackupDRRestTransport._GetBackupPlan._get_response( self._host, metadata, query_params, @@ -4121,21 +4569,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupvault.BackupVault() - pb_resp = backupvault.BackupVault.pb(resp) + resp = backupplan.BackupPlan() + pb_resp = backupplan.BackupPlan.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_backup_vault(resp) + resp = self._interceptor.post_get_backup_plan(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_backup_vault_with_metadata( + resp, _ = self._interceptor.post_get_backup_plan_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupvault.BackupVault.to_json(response) + response_payload = backupplan.BackupPlan.to_json(response) except: response_payload = None http_response = { @@ -4144,21 +4592,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetBackupVault", + "rpcName": "GetBackupPlan", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetDataSource( - _BaseBackupDRRestTransport._BaseGetDataSource, BackupDRRestStub + class _GetBackupPlanAssociation( + _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.GetDataSource") + return hash("BackupDRRestTransport.GetBackupPlanAssociation") @staticmethod def _get_response( @@ -4184,51 +4632,50 @@ def _get_response( def __call__( self, - request: backupvault.GetDataSourceRequest, + request: 
backupplanassociation.GetBackupPlanAssociationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.DataSource: - r"""Call the get data source method over HTTP. + ) -> backupplanassociation.BackupPlanAssociation: + r"""Call the get backup plan + association method over HTTP. - Args: - request (~.backupvault.GetDataSourceRequest): - The request object. Request message for getting a - DataSource instance. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.backupplanassociation.GetBackupPlanAssociationRequest): + The request object. Request message for getting a + BackupPlanAssociation resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.backupvault.DataSource: - Message describing a DataSource - object. Datasource object used to - represent Datasource details for both - admin and basic view. 
+ Returns: + ~.backupplanassociation.BackupPlanAssociation: + A BackupPlanAssociation represents a + single BackupPlanAssociation which + contains details like workload, backup + plan etc """ http_options = ( - _BaseBackupDRRestTransport._BaseGetDataSource._get_http_options() + _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_http_options() ) - request, metadata = self._interceptor.pre_get_data_source(request, metadata) - transcoded_request = ( - _BaseBackupDRRestTransport._BaseGetDataSource._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_get_backup_plan_association( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_transcoded_request( + http_options, request ) # Jsonify the query params - query_params = ( - _BaseBackupDRRestTransport._BaseGetDataSource._get_query_params_json( - transcoded_request - ) + query_params = _BaseBackupDRRestTransport._BaseGetBackupPlanAssociation._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -4249,17 +4696,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetDataSource", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupPlanAssociation", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetDataSource", + "rpcName": "GetBackupPlanAssociation", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._GetDataSource._get_response( + response = BackupDRRestTransport._GetBackupPlanAssociation._get_response( self._host, metadata, query_params, @@ -4274,22 +4721,24 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupvault.DataSource() - pb_resp = backupvault.DataSource.pb(resp) + resp = 
backupplanassociation.BackupPlanAssociation() + pb_resp = backupplanassociation.BackupPlanAssociation.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_data_source(resp) + resp = self._interceptor.post_get_backup_plan_association(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_data_source_with_metadata( + resp, _ = self._interceptor.post_get_backup_plan_association_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupvault.DataSource.to_json(response) - except: + response_payload = ( + backupplanassociation.BackupPlanAssociation.to_json(response) + ) + except: response_payload = None http_response = { "payload": response_payload, @@ -4297,21 +4746,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_data_source", + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetDataSource", + "rpcName": "GetBackupPlanAssociation", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetManagementServer( - _BaseBackupDRRestTransport._BaseGetManagementServer, BackupDRRestStub + class _GetBackupPlanRevision( + _BaseBackupDRRestTransport._BaseGetBackupPlanRevision, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.GetManagementServer") + return hash("BackupDRRestTransport.GetBackupPlanRevision") @staticmethod def _get_response( @@ -4337,18 +4786,18 @@ def _get_response( def __call__( self, - request: backupdr.GetManagementServerRequest, + request: backupplan.GetBackupPlanRevisionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, 
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupdr.ManagementServer: - r"""Call the get management server method over HTTP. + ) -> backupplan.BackupPlanRevision: + r"""Call the get backup plan revision method over HTTP. Args: - request (~.backupdr.GetManagementServerRequest): - The request object. Request message for getting a - management server instance. + request (~.backupplan.GetBackupPlanRevisionRequest): + The request object. The request message for getting a + ``BackupPlanRevision``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4358,25 +4807,25 @@ def __call__( be of type `bytes`. Returns: - ~.backupdr.ManagementServer: - ManagementServer describes a single - BackupDR ManagementServer instance. + ~.backupplan.BackupPlanRevision: + ``BackupPlanRevision`` represents a snapshot of a + ``BackupPlan`` at a point in time. """ http_options = ( - _BaseBackupDRRestTransport._BaseGetManagementServer._get_http_options() + _BaseBackupDRRestTransport._BaseGetBackupPlanRevision._get_http_options() ) - request, metadata = self._interceptor.pre_get_management_server( + request, metadata = self._interceptor.pre_get_backup_plan_revision( request, metadata ) - transcoded_request = _BaseBackupDRRestTransport._BaseGetManagementServer._get_transcoded_request( + transcoded_request = _BaseBackupDRRestTransport._BaseGetBackupPlanRevision._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseGetManagementServer._get_query_params_json( + query_params = _BaseBackupDRRestTransport._BaseGetBackupPlanRevision._get_query_params_json( transcoded_request ) @@ -4398,17 +4847,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetManagementServer", + f"Sending request for 
google.cloud.backupdr_v1.BackupDRClient.GetBackupPlanRevision", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetManagementServer", + "rpcName": "GetBackupPlanRevision", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._GetManagementServer._get_response( + response = BackupDRRestTransport._GetBackupPlanRevision._get_response( self._host, metadata, query_params, @@ -4423,21 +4872,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ManagementServer() - pb_resp = backupdr.ManagementServer.pb(resp) + resp = backupplan.BackupPlanRevision() + pb_resp = backupplan.BackupPlanRevision.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_management_server(resp) + resp = self._interceptor.post_get_backup_plan_revision(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_management_server_with_metadata( + resp, _ = self._interceptor.post_get_backup_plan_revision_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupdr.ManagementServer.to_json(response) + response_payload = backupplan.BackupPlanRevision.to_json(response) except: response_payload = None http_response = { @@ -4446,21 +4895,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_revision", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "GetManagementServer", + "rpcName": "GetBackupPlanRevision", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _InitializeService( - 
_BaseBackupDRRestTransport._BaseInitializeService, BackupDRRestStub + class _GetBackupVault( + _BaseBackupDRRestTransport._BaseGetBackupVault, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.InitializeService") + return hash("BackupDRRestTransport.GetBackupVault") @staticmethod def _get_response( @@ -4481,24 +4930,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: backupdr.InitializeServiceRequest, + request: backupvault.GetBackupVaultRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> operations_pb2.Operation: - r"""Call the initialize service method over HTTP. + ) -> backupvault.BackupVault: + r"""Call the get backup vault method over HTTP. Args: - request (~.backupdr.InitializeServiceRequest): - The request object. Request message for initializing the - service. + request (~.backupvault.GetBackupVaultRequest): + The request object. Request message for getting a + BackupVault. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4508,31 +4956,30 @@ def __call__( be of type `bytes`. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + ~.backupvault.BackupVault: + Message describing a BackupVault + object. 
""" http_options = ( - _BaseBackupDRRestTransport._BaseInitializeService._get_http_options() + _BaseBackupDRRestTransport._BaseGetBackupVault._get_http_options() ) - request, metadata = self._interceptor.pre_initialize_service( + request, metadata = self._interceptor.pre_get_backup_vault( request, metadata ) - transcoded_request = _BaseBackupDRRestTransport._BaseInitializeService._get_transcoded_request( - http_options, request - ) - - body = _BaseBackupDRRestTransport._BaseInitializeService._get_request_body_json( - transcoded_request + transcoded_request = ( + _BaseBackupDRRestTransport._BaseGetBackupVault._get_transcoded_request( + http_options, request + ) ) # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseInitializeService._get_query_params_json( - transcoded_request + query_params = ( + _BaseBackupDRRestTransport._BaseGetBackupVault._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -4543,7 +4990,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -4553,24 +5000,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.InitializeService", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetBackupVault", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "InitializeService", + "rpcName": "GetBackupVault", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._InitializeService._get_response( + response = BackupDRRestTransport._GetBackupVault._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ 
-4579,19 +5025,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = backupvault.BackupVault() + pb_resp = backupvault.BackupVault.pb(resp) - resp = self._interceptor.post_initialize_service(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_backup_vault(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_initialize_service_with_metadata( + resp, _ = self._interceptor.post_get_backup_vault_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = backupvault.BackupVault.to_json(response) except: response_payload = None http_response = { @@ -4600,21 +5048,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.initialize_service", + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "InitializeService", + "rpcName": "GetBackupVault", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListBackupPlanAssociations( - _BaseBackupDRRestTransport._BaseListBackupPlanAssociations, BackupDRRestStub + class _GetDataSource( + _BaseBackupDRRestTransport._BaseGetDataSource, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.ListBackupPlanAssociations") + return hash("BackupDRRestTransport.GetDataSource") @staticmethod def _get_response( @@ -4640,48 +5088,1112 @@ def _get_response( def __call__( self, - request: backupplanassociation.ListBackupPlanAssociationsRequest, + request: 
backupvault.GetDataSourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplanassociation.ListBackupPlanAssociationsResponse: - r"""Call the list backup plan - associations method over HTTP. + ) -> backupvault.DataSource: + r"""Call the get data source method over HTTP. - Args: - request (~.backupplanassociation.ListBackupPlanAssociationsRequest): - The request object. Request message for List - BackupPlanAssociation - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.backupvault.GetDataSourceRequest): + The request object. Request message for getting a + DataSource instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.backupplanassociation.ListBackupPlanAssociationsResponse: - Response message for List - BackupPlanAssociation + Returns: + ~.backupvault.DataSource: + Message describing a DataSource + object. Datasource object used to + represent Datasource details for both + admin and basic view. 
""" http_options = ( - _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_http_options() + _BaseBackupDRRestTransport._BaseGetDataSource._get_http_options() ) - request, metadata = self._interceptor.pre_list_backup_plan_associations( - request, metadata + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + transcoded_request = ( + _BaseBackupDRRestTransport._BaseGetDataSource._get_transcoded_request( + http_options, request + ) ) - transcoded_request = _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_transcoded_request( - http_options, request + + # Jsonify the query params + query_params = ( + _BaseBackupDRRestTransport._BaseGetDataSource._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetDataSource", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "GetDataSource", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._GetDataSource._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupvault.DataSource() + pb_resp = backupvault.DataSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_data_source(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_source_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = backupvault.DataSource.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_data_source", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "GetDataSource", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDataSourceReference( + _BaseBackupDRRestTransport._BaseGetDataSourceReference, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.GetDataSourceReference") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: datasourcereference.GetDataSourceReferenceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> datasourcereference.DataSourceReference: + r"""Call the get data source reference method over HTTP. + + Args: + request (~.datasourcereference.GetDataSourceReferenceRequest): + The request object. Request for the + GetDataSourceReference method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.datasourcereference.DataSourceReference: + DataSourceReference is a reference to + a DataSource resource. + + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseGetDataSourceReference._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_data_source_reference( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseGetDataSourceReference._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseGetDataSourceReference._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetDataSourceReference", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": 
"GetDataSourceReference", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._GetDataSourceReference._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datasourcereference.DataSourceReference() + pb_resp = datasourcereference.DataSourceReference.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_data_source_reference(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_source_reference_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = datasourcereference.DataSourceReference.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_data_source_reference", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "GetDataSourceReference", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetManagementServer( + _BaseBackupDRRestTransport._BaseGetManagementServer, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.GetManagementServer") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = 
transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backupdr.GetManagementServerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backupdr.ManagementServer: + r"""Call the get management server method over HTTP. + + Args: + request (~.backupdr.GetManagementServerRequest): + The request object. Request message for getting a + management server instance. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backupdr.ManagementServer: + ManagementServer describes a single + BackupDR ManagementServer instance. 
+ + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseGetManagementServer._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_management_server( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseGetManagementServer._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseGetManagementServer._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.GetManagementServer", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "GetManagementServer", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._GetManagementServer._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupdr.ManagementServer() + pb_resp = backupdr.ManagementServer.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_management_server(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_management_server_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = backupdr.ManagementServer.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.get_management_server", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "GetManagementServer", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _InitializeService( + _BaseBackupDRRestTransport._BaseInitializeService, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.InitializeService") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: backupdr.InitializeServiceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = 
None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the initialize service method over HTTP. + + Args: + request (~.backupdr.InitializeServiceRequest): + The request object. Request message for initializing the + service. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseInitializeService._get_http_options() + ) + + request, metadata = self._interceptor.pre_initialize_service( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseInitializeService._get_transcoded_request( + http_options, request + ) + + body = _BaseBackupDRRestTransport._BaseInitializeService._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseInitializeService._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.InitializeService", + extra={ + 
"serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "InitializeService", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._InitializeService._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_initialize_service(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_initialize_service_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.initialize_service", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "InitializeService", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackupPlanAssociations( + _BaseBackupDRRestTransport._BaseListBackupPlanAssociations, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.ListBackupPlanAssociations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + 
headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backupplanassociation.ListBackupPlanAssociationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backupplanassociation.ListBackupPlanAssociationsResponse: + r"""Call the list backup plan + associations method over HTTP. + + Args: + request (~.backupplanassociation.ListBackupPlanAssociationsRequest): + The request object. Request message for List + BackupPlanAssociation + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.backupplanassociation.ListBackupPlanAssociationsResponse: + Response message for List + BackupPlanAssociation + + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_backup_plan_associations( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupPlanAssociations", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "ListBackupPlanAssociations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._ListBackupPlanAssociations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplanassociation.ListBackupPlanAssociationsResponse() + pb_resp = backupplanassociation.ListBackupPlanAssociationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_plan_associations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + ( + resp, + _, + ) = self._interceptor.post_list_backup_plan_associations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = backupplanassociation.ListBackupPlanAssociationsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "ListBackupPlanAssociations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackupPlanRevisions( + _BaseBackupDRRestTransport._BaseListBackupPlanRevisions, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.ListBackupPlanRevisions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def 
__call__( + self, + request: backupplan.ListBackupPlanRevisionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backupplan.ListBackupPlanRevisionsResponse: + r"""Call the list backup plan + revisions method over HTTP. + + Args: + request (~.backupplan.ListBackupPlanRevisionsRequest): + The request object. The request message for getting a list of + ``BackupPlanRevision``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backupplan.ListBackupPlanRevisionsResponse: + The response message for getting a list of + ``BackupPlanRevision``. 
+ + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseListBackupPlanRevisions._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_backup_plan_revisions( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseListBackupPlanRevisions._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackupDRRestTransport._BaseListBackupPlanRevisions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupPlanRevisions", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "ListBackupPlanRevisions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._ListBackupPlanRevisions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.ListBackupPlanRevisionsResponse() + pb_resp = backupplan.ListBackupPlanRevisionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_plan_revisions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_plan_revisions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + backupplan.ListBackupPlanRevisionsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_revisions", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "ListBackupPlanRevisions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackupPlans( + _BaseBackupDRRestTransport._BaseListBackupPlans, BackupDRRestStub + ): + def __hash__(self): + return hash("BackupDRRestTransport.ListBackupPlans") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backupplan.ListBackupPlansRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backupplan.ListBackupPlansResponse: + r"""Call the list backup plans method over HTTP. + + Args: + request (~.backupplan.ListBackupPlansRequest): + The request object. The request message for getting a list ``BackupPlan``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backupplan.ListBackupPlansResponse: + The response message for getting a list of + ``BackupPlan``. + + """ + + http_options = ( + _BaseBackupDRRestTransport._BaseListBackupPlans._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_backup_plans( + request, metadata + ) + transcoded_request = ( + _BaseBackupDRRestTransport._BaseListBackupPlans._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseBackupDRRestTransport._BaseListBackupPlans._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupPlans", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + 
"rpcName": "ListBackupPlans", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackupDRRestTransport._ListBackupPlans._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = backupplan.ListBackupPlansResponse() + pb_resp = backupplan.ListBackupPlansResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_backup_plans(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_backup_plans_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = backupplan.ListBackupPlansResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", + extra={ + "serviceName": "google.cloud.backupdr.v1.BackupDR", + "rpcName": "ListBackupPlans", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListBackups(_BaseBackupDRRestTransport._BaseListBackups, BackupDRRestStub): + def __hash__(self): + return hash("BackupDRRestTransport.ListBackups") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: backupvault.ListBackupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> backupvault.ListBackupsResponse: + r"""Call the list backups method over HTTP. + + Args: + request (~.backupvault.ListBackupsRequest): + The request object. Request message for listing Backups. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.backupvault.ListBackupsResponse: + Response message for listing Backups. 
+ """ + + http_options = ( + _BaseBackupDRRestTransport._BaseListBackups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_backups(request, metadata) + transcoded_request = ( + _BaseBackupDRRestTransport._BaseListBackups._get_transcoded_request( + http_options, request + ) ) # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListBackupPlanAssociations._get_query_params_json( - transcoded_request + query_params = ( + _BaseBackupDRRestTransport._BaseListBackups._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -4702,17 +6214,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupPlanAssociations", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackups", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlanAssociations", + "rpcName": "ListBackups", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._ListBackupPlanAssociations._get_response( + response = BackupDRRestTransport._ListBackups._get_response( self._host, metadata, query_params, @@ -4727,26 +6239,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupplanassociation.ListBackupPlanAssociationsResponse() - pb_resp = backupplanassociation.ListBackupPlanAssociationsResponse.pb(resp) + resp = backupvault.ListBackupsResponse() + pb_resp = backupvault.ListBackupsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_backup_plan_associations(resp) + resp = self._interceptor.post_list_backups(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - ( - resp, - _, - ) = self._interceptor.post_list_backup_plan_associations_with_metadata( + resp, _ = 
self._interceptor.post_list_backups_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupplanassociation.ListBackupPlanAssociationsResponse.to_json( - response - ) + response_payload = backupvault.ListBackupsResponse.to_json(response) except: response_payload = None http_response = { @@ -4755,21 +6262,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backups", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlanAssociations", + "rpcName": "ListBackups", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListBackupPlans( - _BaseBackupDRRestTransport._BaseListBackupPlans, BackupDRRestStub + class _ListBackupVaults( + _BaseBackupDRRestTransport._BaseListBackupVaults, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.ListBackupPlans") + return hash("BackupDRRestTransport.ListBackupVaults") @staticmethod def _get_response( @@ -4795,17 +6302,18 @@ def _get_response( def __call__( self, - request: backupplan.ListBackupPlansRequest, + request: backupvault.ListBackupVaultsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupplan.ListBackupPlansResponse: - r"""Call the list backup plans method over HTTP. + ) -> backupvault.ListBackupVaultsResponse: + r"""Call the list backup vaults method over HTTP. Args: - request (~.backupplan.ListBackupPlansRequest): - The request object. The request message for getting a list ``BackupPlan``. + request (~.backupvault.ListBackupVaultsRequest): + The request object. Request message for listing + backupvault stores. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4815,28 +6323,26 @@ def __call__( be of type `bytes`. Returns: - ~.backupplan.ListBackupPlansResponse: - The response message for getting a list of - ``BackupPlan``. + ~.backupvault.ListBackupVaultsResponse: + Response message for listing + BackupVaults. """ http_options = ( - _BaseBackupDRRestTransport._BaseListBackupPlans._get_http_options() + _BaseBackupDRRestTransport._BaseListBackupVaults._get_http_options() ) - request, metadata = self._interceptor.pre_list_backup_plans( + request, metadata = self._interceptor.pre_list_backup_vaults( request, metadata ) - transcoded_request = ( - _BaseBackupDRRestTransport._BaseListBackupPlans._get_transcoded_request( - http_options, request - ) + transcoded_request = _BaseBackupDRRestTransport._BaseListBackupVaults._get_transcoded_request( + http_options, request ) # Jsonify the query params query_params = ( - _BaseBackupDRRestTransport._BaseListBackupPlans._get_query_params_json( + _BaseBackupDRRestTransport._BaseListBackupVaults._get_query_params_json( transcoded_request ) ) @@ -4859,17 +6365,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupPlans", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupVaults", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlans", + "rpcName": "ListBackupVaults", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._ListBackupPlans._get_response( + response = BackupDRRestTransport._ListBackupVaults._get_response( self._host, metadata, query_params, @@ -4884,21 +6390,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupplan.ListBackupPlansResponse() - pb_resp = 
backupplan.ListBackupPlansResponse.pb(resp) + resp = backupvault.ListBackupVaultsResponse() + pb_resp = backupvault.ListBackupVaultsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_backup_plans(resp) + resp = self._interceptor.post_list_backup_vaults(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_plans_with_metadata( + resp, _ = self._interceptor.post_list_backup_vaults_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupplan.ListBackupPlansResponse.to_json( + response_payload = backupvault.ListBackupVaultsResponse.to_json( response ) except: @@ -4909,19 +6415,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupPlans", + "rpcName": "ListBackupVaults", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListBackups(_BaseBackupDRRestTransport._BaseListBackups, BackupDRRestStub): + class _ListDataSources( + _BaseBackupDRRestTransport._BaseListDataSources, BackupDRRestStub + ): def __hash__(self): - return hash("BackupDRRestTransport.ListBackups") + return hash("BackupDRRestTransport.ListDataSources") @staticmethod def _get_response( @@ -4947,17 +6455,18 @@ def _get_response( def __call__( self, - request: backupvault.ListBackupsRequest, + request: backupvault.ListDataSourcesRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.ListBackupsResponse: - r"""Call the list backups method 
over HTTP. + ) -> backupvault.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. Args: - request (~.backupvault.ListBackupsRequest): - The request object. Request message for listing Backups. + request (~.backupvault.ListDataSourcesRequest): + The request object. Request message for listing + DataSources. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4967,24 +6476,28 @@ def __call__( be of type `bytes`. Returns: - ~.backupvault.ListBackupsResponse: - Response message for listing Backups. + ~.backupvault.ListDataSourcesResponse: + Response message for listing + DataSources. + """ http_options = ( - _BaseBackupDRRestTransport._BaseListBackups._get_http_options() + _BaseBackupDRRestTransport._BaseListDataSources._get_http_options() ) - request, metadata = self._interceptor.pre_list_backups(request, metadata) + request, metadata = self._interceptor.pre_list_data_sources( + request, metadata + ) transcoded_request = ( - _BaseBackupDRRestTransport._BaseListBackups._get_transcoded_request( + _BaseBackupDRRestTransport._BaseListDataSources._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseBackupDRRestTransport._BaseListBackups._get_query_params_json( + _BaseBackupDRRestTransport._BaseListDataSources._get_query_params_json( transcoded_request ) ) @@ -5007,17 +6520,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackups", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListDataSources", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackups", + "rpcName": "ListDataSources", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._ListBackups._get_response( + response = 
BackupDRRestTransport._ListDataSources._get_response( self._host, metadata, query_params, @@ -5032,21 +6545,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupvault.ListBackupsResponse() - pb_resp = backupvault.ListBackupsResponse.pb(resp) + resp = backupvault.ListDataSourcesResponse() + pb_resp = backupvault.ListDataSourcesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_backups(resp) + resp = self._interceptor.post_list_data_sources(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backups_with_metadata( + resp, _ = self._interceptor.post_list_data_sources_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupvault.ListBackupsResponse.to_json(response) + response_payload = backupvault.ListDataSourcesResponse.to_json( + response + ) except: response_payload = None http_response = { @@ -5055,21 +6570,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backups", + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_data_sources", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackups", + "rpcName": "ListDataSources", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListBackupVaults( - _BaseBackupDRRestTransport._BaseListBackupVaults, BackupDRRestStub + class _ListManagementServers( + _BaseBackupDRRestTransport._BaseListManagementServers, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.ListBackupVaults") + return hash("BackupDRRestTransport.ListManagementServers") @staticmethod def _get_response( @@ -5095,18 +6610,18 @@ def _get_response( def __call__( 
self, - request: backupvault.ListBackupVaultsRequest, + request: backupdr.ListManagementServersRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.ListBackupVaultsResponse: - r"""Call the list backup vaults method over HTTP. + ) -> backupdr.ListManagementServersResponse: + r"""Call the list management servers method over HTTP. Args: - request (~.backupvault.ListBackupVaultsRequest): + request (~.backupdr.ListManagementServersRequest): The request object. Request message for listing - backupvault stores. + management servers. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5116,28 +6631,26 @@ def __call__( be of type `bytes`. Returns: - ~.backupvault.ListBackupVaultsResponse: + ~.backupdr.ListManagementServersResponse: Response message for listing - BackupVaults. + management servers. 
""" http_options = ( - _BaseBackupDRRestTransport._BaseListBackupVaults._get_http_options() + _BaseBackupDRRestTransport._BaseListManagementServers._get_http_options() ) - request, metadata = self._interceptor.pre_list_backup_vaults( + request, metadata = self._interceptor.pre_list_management_servers( request, metadata ) - transcoded_request = _BaseBackupDRRestTransport._BaseListBackupVaults._get_transcoded_request( + transcoded_request = _BaseBackupDRRestTransport._BaseListManagementServers._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = ( - _BaseBackupDRRestTransport._BaseListBackupVaults._get_query_params_json( - transcoded_request - ) + query_params = _BaseBackupDRRestTransport._BaseListManagementServers._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5158,17 +6671,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListBackupVaults", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListManagementServers", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupVaults", + "rpcName": "ListManagementServers", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._ListBackupVaults._get_response( + response = BackupDRRestTransport._ListManagementServers._get_response( self._host, metadata, query_params, @@ -5183,21 +6696,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupvault.ListBackupVaultsResponse() - pb_resp = backupvault.ListBackupVaultsResponse.pb(resp) + resp = backupdr.ListManagementServersResponse() + pb_resp = backupdr.ListManagementServersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_backup_vaults(resp) + resp = 
self._interceptor.post_list_management_servers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_backup_vaults_with_metadata( + resp, _ = self._interceptor.post_list_management_servers_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupvault.ListBackupVaultsResponse.to_json( + response_payload = backupdr.ListManagementServersResponse.to_json( response ) except: @@ -5208,21 +6721,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", + "Received response for google.cloud.backupdr_v1.BackupDRClient.list_management_servers", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListBackupVaults", + "rpcName": "ListManagementServers", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListDataSources( - _BaseBackupDRRestTransport._BaseListDataSources, BackupDRRestStub + class _RestoreBackup( + _BaseBackupDRRestTransport._BaseRestoreBackup, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.ListDataSources") + return hash("BackupDRRestTransport.RestoreBackup") @staticmethod def _get_response( @@ -5243,23 +6756,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: backupvault.ListDataSourcesRequest, + request: backupvault.RestoreBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupvault.ListDataSourcesResponse: - r"""Call the list data sources method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the restore backup method over HTTP. 
Args: - request (~.backupvault.ListDataSourcesRequest): - The request object. Request message for listing - DataSources. + request (~.backupvault.RestoreBackupRequest): + The request object. Request message for restoring from a + Backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5269,28 +6783,31 @@ def __call__( be of type `bytes`. Returns: - ~.backupvault.ListDataSourcesResponse: - Response message for listing - DataSources. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options = ( - _BaseBackupDRRestTransport._BaseListDataSources._get_http_options() + _BaseBackupDRRestTransport._BaseRestoreBackup._get_http_options() ) - request, metadata = self._interceptor.pre_list_data_sources( - request, metadata - ) + request, metadata = self._interceptor.pre_restore_backup(request, metadata) transcoded_request = ( - _BaseBackupDRRestTransport._BaseListDataSources._get_transcoded_request( + _BaseBackupDRRestTransport._BaseRestoreBackup._get_transcoded_request( http_options, request ) ) + body = _BaseBackupDRRestTransport._BaseRestoreBackup._get_request_body_json( + transcoded_request + ) + # Jsonify the query params query_params = ( - _BaseBackupDRRestTransport._BaseListDataSources._get_query_params_json( + _BaseBackupDRRestTransport._BaseRestoreBackup._get_query_params_json( transcoded_request ) ) @@ -5303,7 +6820,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -5313,23 +6830,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListDataSources", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.RestoreBackup", extra={ 
"serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListDataSources", + "rpcName": "RestoreBackup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._ListDataSources._get_response( + response = BackupDRRestTransport._RestoreBackup._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -5338,23 +6856,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupvault.ListDataSourcesResponse() - pb_resp = backupvault.ListDataSourcesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_data_sources(resp) + resp = self._interceptor.post_restore_backup(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_data_sources_with_metadata( + resp, _ = self._interceptor.post_restore_backup_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER - try: - response_payload = backupvault.ListDataSourcesResponse.to_json( - response - ) + try: + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -5363,21 +6877,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_data_sources", + "Received response for google.cloud.backupdr_v1.BackupDRClient.restore_backup", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListDataSources", + "rpcName": "RestoreBackup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return 
resp - class _ListManagementServers( - _BaseBackupDRRestTransport._BaseListManagementServers, BackupDRRestStub + class _TriggerBackup( + _BaseBackupDRRestTransport._BaseTriggerBackup, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.ListManagementServers") + return hash("BackupDRRestTransport.TriggerBackup") @staticmethod def _get_response( @@ -5398,23 +6912,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: backupdr.ListManagementServersRequest, + request: backupplanassociation.TriggerBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> backupdr.ListManagementServersResponse: - r"""Call the list management servers method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the trigger backup method over HTTP. Args: - request (~.backupdr.ListManagementServersRequest): - The request object. Request message for listing - management servers. + request (~.backupplanassociation.TriggerBackupRequest): + The request object. Request message for triggering a + backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5424,26 +6939,33 @@ def __call__( be of type `bytes`. Returns: - ~.backupdr.ListManagementServersResponse: - Response message for listing - management servers. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseBackupDRRestTransport._BaseListManagementServers._get_http_options() + _BaseBackupDRRestTransport._BaseTriggerBackup._get_http_options() ) - request, metadata = self._interceptor.pre_list_management_servers( - request, metadata + request, metadata = self._interceptor.pre_trigger_backup(request, metadata) + transcoded_request = ( + _BaseBackupDRRestTransport._BaseTriggerBackup._get_transcoded_request( + http_options, request + ) ) - transcoded_request = _BaseBackupDRRestTransport._BaseListManagementServers._get_transcoded_request( - http_options, request + + body = _BaseBackupDRRestTransport._BaseTriggerBackup._get_request_body_json( + transcoded_request ) # Jsonify the query params - query_params = _BaseBackupDRRestTransport._BaseListManagementServers._get_query_params_json( - transcoded_request + query_params = ( + _BaseBackupDRRestTransport._BaseTriggerBackup._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5454,7 +6976,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -5464,23 +6986,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.ListManagementServers", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.TriggerBackup", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListManagementServers", + "rpcName": "TriggerBackup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._ListManagementServers._get_response( + response = BackupDRRestTransport._TriggerBackup._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception @@ -5489,23 +7012,19 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = backupdr.ListManagementServersResponse() - pb_resp = backupdr.ListManagementServersResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_management_servers(resp) + resp = self._interceptor.post_trigger_backup(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_management_servers_with_metadata( + resp, _ = self._interceptor.post_trigger_backup_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = backupdr.ListManagementServersResponse.to_json( - response - ) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -5514,21 +7033,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "Received response for google.cloud.backupdr_v1.BackupDRClient.trigger_backup", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "ListManagementServers", + "rpcName": "TriggerBackup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _RestoreBackup( - _BaseBackupDRRestTransport._BaseRestoreBackup, BackupDRRestStub - ): + class _UpdateBackup(_BaseBackupDRRestTransport._BaseUpdateBackup, BackupDRRestStub): def __hash__(self): - return hash("BackupDRRestTransport.RestoreBackup") + return hash("BackupDRRestTransport.UpdateBackup") @staticmethod def _get_response( @@ -5555,17 +7072,17 @@ def _get_response( def __call__( self, - request: 
backupvault.RestoreBackupRequest, + request: backupvault.UpdateBackupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the restore backup method over HTTP. + r"""Call the update backup method over HTTP. Args: - request (~.backupvault.RestoreBackupRequest): - The request object. Request message for restoring from a + request (~.backupvault.UpdateBackupRequest): + The request object. Request message for updating a Backup. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -5584,23 +7101,23 @@ def __call__( """ http_options = ( - _BaseBackupDRRestTransport._BaseRestoreBackup._get_http_options() + _BaseBackupDRRestTransport._BaseUpdateBackup._get_http_options() ) - request, metadata = self._interceptor.pre_restore_backup(request, metadata) + request, metadata = self._interceptor.pre_update_backup(request, metadata) transcoded_request = ( - _BaseBackupDRRestTransport._BaseRestoreBackup._get_transcoded_request( + _BaseBackupDRRestTransport._BaseUpdateBackup._get_transcoded_request( http_options, request ) ) - body = _BaseBackupDRRestTransport._BaseRestoreBackup._get_request_body_json( + body = _BaseBackupDRRestTransport._BaseUpdateBackup._get_request_body_json( transcoded_request ) # Jsonify the query params query_params = ( - _BaseBackupDRRestTransport._BaseRestoreBackup._get_query_params_json( + _BaseBackupDRRestTransport._BaseUpdateBackup._get_query_params_json( transcoded_request ) ) @@ -5623,17 +7140,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.RestoreBackup", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.UpdateBackup", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "RestoreBackup", + "rpcName": "UpdateBackup", "httpRequest": http_request, "metadata": 
http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._RestoreBackup._get_response( + response = BackupDRRestTransport._UpdateBackup._get_response( self._host, metadata, query_params, @@ -5652,9 +7169,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_restore_backup(resp) + resp = self._interceptor.post_update_backup(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_restore_backup_with_metadata( + resp, _ = self._interceptor.post_update_backup_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5670,21 +7187,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.restore_backup", + "Received response for google.cloud.backupdr_v1.BackupDRClient.update_backup", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "RestoreBackup", + "rpcName": "UpdateBackup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _TriggerBackup( - _BaseBackupDRRestTransport._BaseTriggerBackup, BackupDRRestStub + class _UpdateBackupPlan( + _BaseBackupDRRestTransport._BaseUpdateBackupPlan, BackupDRRestStub ): def __hash__(self): - return hash("BackupDRRestTransport.TriggerBackup") + return hash("BackupDRRestTransport.UpdateBackupPlan") @staticmethod def _get_response( @@ -5711,18 +7228,18 @@ def _get_response( def __call__( self, - request: backupplanassociation.TriggerBackupRequest, + request: backupplan.UpdateBackupPlanRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the trigger backup method over HTTP. + r"""Call the update backup plan method over HTTP. 
Args: - request (~.backupplanassociation.TriggerBackupRequest): - The request object. Request message for triggering a - backup. + request (~.backupplan.UpdateBackupPlanRequest): + The request object. Request message for updating a backup + plan. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5740,23 +7257,25 @@ def __call__( """ http_options = ( - _BaseBackupDRRestTransport._BaseTriggerBackup._get_http_options() + _BaseBackupDRRestTransport._BaseUpdateBackupPlan._get_http_options() ) - request, metadata = self._interceptor.pre_trigger_backup(request, metadata) - transcoded_request = ( - _BaseBackupDRRestTransport._BaseTriggerBackup._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_update_backup_plan( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseUpdateBackupPlan._get_transcoded_request( + http_options, request ) - body = _BaseBackupDRRestTransport._BaseTriggerBackup._get_request_body_json( - transcoded_request + body = ( + _BaseBackupDRRestTransport._BaseUpdateBackupPlan._get_request_body_json( + transcoded_request + ) ) # Jsonify the query params query_params = ( - _BaseBackupDRRestTransport._BaseTriggerBackup._get_query_params_json( + _BaseBackupDRRestTransport._BaseUpdateBackupPlan._get_query_params_json( transcoded_request ) ) @@ -5779,17 +7298,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.TriggerBackup", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.UpdateBackupPlan", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "TriggerBackup", + "rpcName": "UpdateBackupPlan", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._TriggerBackup._get_response( + response = 
BackupDRRestTransport._UpdateBackupPlan._get_response( self._host, metadata, query_params, @@ -5808,9 +7327,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_trigger_backup(resp) + resp = self._interceptor.post_update_backup_plan(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_trigger_backup_with_metadata( + resp, _ = self._interceptor.post_update_backup_plan_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5826,19 +7345,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.trigger_backup", + "Received response for google.cloud.backupdr_v1.BackupDRClient.update_backup_plan", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "TriggerBackup", + "rpcName": "UpdateBackupPlan", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _UpdateBackup(_BaseBackupDRRestTransport._BaseUpdateBackup, BackupDRRestStub): + class _UpdateBackupPlanAssociation( + _BaseBackupDRRestTransport._BaseUpdateBackupPlanAssociation, BackupDRRestStub + ): def __hash__(self): - return hash("BackupDRRestTransport.UpdateBackup") + return hash("BackupDRRestTransport.UpdateBackupPlanAssociation") @staticmethod def _get_response( @@ -5865,54 +7386,53 @@ def _get_response( def __call__( self, - request: backupvault.UpdateBackupRequest, + request: backupplanassociation.UpdateBackupPlanAssociationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the update backup method over HTTP. + r"""Call the update backup plan + association method over HTTP. 
- Args: - request (~.backupvault.UpdateBackupRequest): - The request object. Request message for updating a - Backup. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.backupplanassociation.UpdateBackupPlanAssociationRequest): + The request object. Request message for updating a backup + plan association. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" http_options = ( - _BaseBackupDRRestTransport._BaseUpdateBackup._get_http_options() + _BaseBackupDRRestTransport._BaseUpdateBackupPlanAssociation._get_http_options() ) - request, metadata = self._interceptor.pre_update_backup(request, metadata) - transcoded_request = ( - _BaseBackupDRRestTransport._BaseUpdateBackup._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_update_backup_plan_association( + request, metadata + ) + transcoded_request = _BaseBackupDRRestTransport._BaseUpdateBackupPlanAssociation._get_transcoded_request( + http_options, request ) - body = _BaseBackupDRRestTransport._BaseUpdateBackup._get_request_body_json( + body = _BaseBackupDRRestTransport._BaseUpdateBackupPlanAssociation._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = ( - _BaseBackupDRRestTransport._BaseUpdateBackup._get_query_params_json( - transcoded_request - ) + query_params = _BaseBackupDRRestTransport._BaseUpdateBackupPlanAssociation._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5933,17 +7453,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.backupdr_v1.BackupDRClient.UpdateBackup", + f"Sending request for google.cloud.backupdr_v1.BackupDRClient.UpdateBackupPlanAssociation", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateBackup", + "rpcName": "UpdateBackupPlanAssociation", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = BackupDRRestTransport._UpdateBackup._get_response( + response = BackupDRRestTransport._UpdateBackupPlanAssociation._get_response( self._host, metadata, query_params, @@ -5962,9 +7482,12 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_backup(resp) + resp = 
self._interceptor.post_update_backup_plan_association(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_update_backup_with_metadata( + ( + resp, + _, + ) = self._interceptor.post_update_backup_plan_association_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5980,10 +7503,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.backupdr_v1.BackupDRClient.update_backup", + "Received response for google.cloud.backupdr_v1.BackupDRClient.update_backup_plan_association", extra={ "serviceName": "google.cloud.backupdr.v1.BackupDR", - "rpcName": "UpdateBackup", + "rpcName": "UpdateBackupPlanAssociation", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -6380,6 +7903,28 @@ def delete_management_server( # In C++ this would require a dynamic_cast return self._DeleteManagementServer(self._session, self._host, self._interceptor) # type: ignore + @property + def fetch_backup_plan_associations_for_resource_type( + self, + ) -> Callable[ + [backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest], + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FetchBackupPlanAssociationsForResourceType(self._session, self._host, self._interceptor) # type: ignore + + @property + def fetch_data_source_references_for_resource_type( + self, + ) -> Callable[ + [datasourcereference.FetchDataSourceReferencesForResourceTypeRequest], + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FetchDataSourceReferencesForResourceType(self._session, self._host, self._interceptor) # type: ignore + @property def fetch_usable_backup_vaults( self, @@ -6418,6 +7963,16 @@ def get_backup_plan_association( # In C++ this would require a dynamic_cast return self._GetBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + @property + def get_backup_plan_revision( + self, + ) -> Callable[ + [backupplan.GetBackupPlanRevisionRequest], backupplan.BackupPlanRevision + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBackupPlanRevision(self._session, self._host, self._interceptor) # type: ignore + @property def get_backup_vault( self, @@ -6434,6 +7989,17 @@ def get_data_source( # In C++ this would require a dynamic_cast return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore + @property + def get_data_source_reference( + self, + ) -> Callable[ + [datasourcereference.GetDataSourceReferenceRequest], + datasourcereference.DataSourceReference, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataSourceReference(self._session, self._host, self._interceptor) # type: ignore + @property def get_management_server( self, @@ -6461,6 +8027,17 @@ def list_backup_plan_associations( # In C++ this would require a dynamic_cast return self._ListBackupPlanAssociations(self._session, self._host, self._interceptor) # type: ignore + @property + def list_backup_plan_revisions( + self, + ) -> Callable[ + [backupplan.ListBackupPlanRevisionsRequest], + backupplan.ListBackupPlanRevisionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBackupPlanRevisions(self._session, self._host, self._interceptor) # type: ignore + @property def list_backup_plans( self, @@ -6535,6 +8112,25 @@ def update_backup( # In C++ this would require a dynamic_cast return self._UpdateBackup(self._session, self._host, self._interceptor) # type: ignore + @property + def update_backup_plan( + self, + ) -> Callable[[backupplan.UpdateBackupPlanRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupPlan(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_backup_plan_association( + self, + ) -> Callable[ + [backupplanassociation.UpdateBackupPlanAssociationRequest], + operations_pb2.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBackupPlanAssociation(self._session, self._host, self._interceptor) # type: ignore + @property def update_backup_vault( self, diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py index ff2cddd67bc7..88c0190195f2 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/services/backup_dr/transports/rest_base.py @@ -29,6 +29,7 @@ backupplan, backupplanassociation, backupvault, + datasourcereference, ) from .base import DEFAULT_CLIENT_INFO, BackupDRTransport @@ -571,6 +572,110 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseFetchBackupPlanAssociationsForResourceType: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "resourceType": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/backupPlanAssociations:fetchForResourceType", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseFetchBackupPlanAssociationsForResourceType._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseFetchDataSourceReferencesForResourceType: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "resourceType": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dataSourceReferences:fetchForResourceType", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest.pb( + request + ) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseFetchDataSourceReferencesForResourceType._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseFetchUsableBackupVaults: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -761,6 +866,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetBackupPlanRevision: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/backupPlans/*/revisions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backupplan.GetBackupPlanRevisionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + 
query_params.update( + _BaseBackupDRRestTransport._BaseGetBackupPlanRevision._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetBackupVault: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -855,6 +1007,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetDataSourceReference: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataSourceReferences/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = datasourcereference.GetDataSourceReferenceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseGetDataSourceReference._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetManagementServer: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1008,6 +1207,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class 
_BaseListBackupPlanRevisions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/backupPlans/*}/revisions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backupplan.ListBackupPlanRevisionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseListBackupPlanRevisions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListBackupPlans: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1416,6 +1662,126 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateBackupPlan: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": 
"patch", + "uri": "/v1/{backup_plan.name=projects/*/locations/*/backupPlans/*}", + "body": "backup_plan", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backupplan.UpdateBackupPlanRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseUpdateBackupPlan._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateBackupPlanAssociation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{backup_plan_association.name=projects/*/locations/*/backupPlanAssociations/*}", + "body": "backup_plan_association", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = backupplanassociation.UpdateBackupPlanAssociationRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # 
Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBackupDRRestTransport._BaseUpdateBackupPlanAssociation._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateBackupVault: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py index c536c8c8d1b7..1c31717b1f55 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/__init__.py @@ -30,25 +30,33 @@ ) from .backupplan import ( BackupPlan, + BackupPlanRevision, BackupRule, BackupWindow, CreateBackupPlanRequest, DeleteBackupPlanRequest, GetBackupPlanRequest, + GetBackupPlanRevisionRequest, + ListBackupPlanRevisionsRequest, + ListBackupPlanRevisionsResponse, ListBackupPlansRequest, ListBackupPlansResponse, StandardSchedule, + UpdateBackupPlanRequest, WeekDayOfMonth, ) from .backupplanassociation import ( BackupPlanAssociation, CreateBackupPlanAssociationRequest, DeleteBackupPlanAssociationRequest, + FetchBackupPlanAssociationsForResourceTypeRequest, + FetchBackupPlanAssociationsForResourceTypeResponse, GetBackupPlanAssociationRequest, ListBackupPlanAssociationsRequest, ListBackupPlanAssociationsResponse, RuleConfigInfo, TriggerBackupRequest, + UpdateBackupPlanAssociationRequest, ) from .backupvault import ( Backup, @@ -88,6 +96,20 @@ UpdateDataSourceRequest, ) from .backupvault_ba import 
BackupApplianceBackupProperties +from .backupvault_cloudsql import ( + CloudSqlInstanceBackupPlanAssociationProperties, + CloudSqlInstanceBackupProperties, + CloudSqlInstanceDataSourceProperties, + CloudSqlInstanceDataSourceReferenceProperties, + CloudSqlInstanceInitializationConfig, +) +from .backupvault_disk import ( + DiskBackupProperties, + DiskDataSourceProperties, + DiskRestoreProperties, + DiskTargetEnvironment, + RegionDiskTargetEnvironment, +) from .backupvault_gce import ( AcceleratorConfig, AccessConfig, @@ -114,6 +136,14 @@ ServiceAccount, Tags, ) +from .datasourcereference import ( + DataSourceBackupConfigInfo, + DataSourceGcpResourceInfo, + DataSourceReference, + FetchDataSourceReferencesForResourceTypeRequest, + FetchDataSourceReferencesForResourceTypeResponse, + GetDataSourceReferenceRequest, +) __all__ = ( "CreateManagementServerRequest", @@ -130,23 +160,31 @@ "WorkforceIdentityBasedManagementURI", "WorkforceIdentityBasedOAuth2ClientID", "BackupPlan", + "BackupPlanRevision", "BackupRule", "BackupWindow", "CreateBackupPlanRequest", "DeleteBackupPlanRequest", "GetBackupPlanRequest", + "GetBackupPlanRevisionRequest", + "ListBackupPlanRevisionsRequest", + "ListBackupPlanRevisionsResponse", "ListBackupPlansRequest", "ListBackupPlansResponse", "StandardSchedule", + "UpdateBackupPlanRequest", "WeekDayOfMonth", "BackupPlanAssociation", "CreateBackupPlanAssociationRequest", "DeleteBackupPlanAssociationRequest", + "FetchBackupPlanAssociationsForResourceTypeRequest", + "FetchBackupPlanAssociationsForResourceTypeResponse", "GetBackupPlanAssociationRequest", "ListBackupPlanAssociationsRequest", "ListBackupPlanAssociationsResponse", "RuleConfigInfo", "TriggerBackupRequest", + "UpdateBackupPlanAssociationRequest", "Backup", "BackupApplianceBackupConfig", "BackupApplianceLockInfo", @@ -183,6 +221,16 @@ "BackupVaultView", "BackupView", "BackupApplianceBackupProperties", + "CloudSqlInstanceBackupPlanAssociationProperties", + "CloudSqlInstanceBackupProperties", + 
"CloudSqlInstanceDataSourceProperties", + "CloudSqlInstanceDataSourceReferenceProperties", + "CloudSqlInstanceInitializationConfig", + "DiskBackupProperties", + "DiskDataSourceProperties", + "DiskRestoreProperties", + "DiskTargetEnvironment", + "RegionDiskTargetEnvironment", "AcceleratorConfig", "AccessConfig", "AdvancedMachineFeatures", @@ -207,4 +255,10 @@ "ServiceAccount", "Tags", "KeyRevocationActionType", + "DataSourceBackupConfigInfo", + "DataSourceGcpResourceInfo", + "DataSourceReference", + "FetchDataSourceReferencesForResourceTypeRequest", + "FetchDataSourceReferencesForResourceTypeResponse", + "GetDataSourceReferenceRequest", ) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py index b8ac02d792c8..376c9929660b 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupdr.py @@ -21,6 +21,8 @@ from google.protobuf import wrappers_pb2 # type: ignore import proto # type: ignore +from google.cloud.backupdr_v1.types import backupvault_cloudsql + __protobuf__ = proto.module( package="google.cloud.backupdr.v1", manifest={ @@ -555,6 +557,8 @@ class DeleteManagementServerRequest(proto.Message): class InitializeServiceRequest(proto.Message): r"""Request message for initializing the service. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Required. The resource name of the serviceConfig used to @@ -585,6 +589,11 @@ class InitializeServiceRequest(proto.Message): The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000). + cloud_sql_instance_initialization_config (google.cloud.backupdr_v1.types.CloudSqlInstanceInitializationConfig): + Optional. The configuration for initializing + a Cloud SQL instance. 
+ + This field is a member of `oneof`_ ``initialization_config``. """ name: str = proto.Field( @@ -599,6 +608,12 @@ class InitializeServiceRequest(proto.Message): proto.STRING, number=3, ) + cloud_sql_instance_initialization_config: backupvault_cloudsql.CloudSqlInstanceInitializationConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof="initialization_config", + message=backupvault_cloudsql.CloudSqlInstanceInitializationConfig, + ) class InitializeServiceResponse(proto.Message): diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py index 931588a70960..eab394c22b70 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupplan.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.type import dayofweek_pb2 # type: ignore from google.type import month_pb2 # type: ignore @@ -35,6 +36,11 @@ "ListBackupPlansResponse", "GetBackupPlanRequest", "DeleteBackupPlanRequest", + "UpdateBackupPlanRequest", + "BackupPlanRevision", + "GetBackupPlanRevisionRequest", + "ListBackupPlanRevisionsRequest", + "ListBackupPlanRevisionsResponse", }, ) @@ -77,8 +83,9 @@ class BackupPlan(proto.Message): Required. The resource type to which the ``BackupPlan`` will be applied. Examples include, "compute.googleapis.com/Instance", - "sqladmin.googleapis.com/Instance", or - "alloydb.googleapis.com/Cluster". + "sqladmin.googleapis.com/Instance", + "alloydb.googleapis.com/Cluster", + "compute.googleapis.com/Disk". etag (str): Optional. ``etag`` is returned from the service in the response. 
As a user of the service, you may provide an etag @@ -94,6 +101,25 @@ class BackupPlan(proto.Message): Service Account to be used by the BackupVault for taking backups. Specify the email address of the Backup Vault Service Account. + log_retention_days (int): + Optional. Applicable only for CloudSQL resource_type. + + Configures how long logs will be stored. It is defined in + “days”. This value should be greater than or equal to + minimum enforced log retention duration of the backup vault. + supported_resource_types (MutableSequence[str]): + Output only. All resource types to which + backupPlan can be applied. + revision_id (str): + Output only. The user friendly revision ID of the + ``BackupPlanRevision``. + + Example: v0, v1, v2, etc. + revision_name (str): + Output only. The resource id of the ``BackupPlanRevision``. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision_id}`` """ class State(proto.Enum): @@ -112,12 +138,15 @@ class State(proto.Enum): INACTIVE (4): The resource has been created but is not usable. + UPDATING (5): + The resource is being updated. """ STATE_UNSPECIFIED = 0 CREATING = 1 ACTIVE = 2 DELETING = 3 INACTIVE = 4 + UPDATING = 5 name: str = proto.Field( proto.STRING, @@ -168,6 +197,22 @@ class State(proto.Enum): proto.STRING, number=11, ) + log_retention_days: int = proto.Field( + proto.INT64, + number=12, + ) + supported_resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=13, + ) + revision_id: str = proto.Field( + proto.STRING, + number=14, + ) + revision_name: str = proto.Field( + proto.STRING, + number=15, + ) class BackupRule(proto.Message): @@ -189,15 +234,12 @@ class BackupRule(proto.Message): equal to minimum enforced retention of the backup vault. - Minimum value is 1 and maximum value is 90 for - hourly backups. Minimum value is 1 and maximum - value is 90 for daily backups. Minimum value is - 7 and maximum value is 186 for weekly backups. 
class UpdateBackupPlanRequest(proto.Message):
    r"""Request message for updating a backup plan.

    Attributes:
        backup_plan (google.cloud.backupdr_v1.types.BackupPlan):
            Required. The resource being updated
        update_mask (google.protobuf.field_mask_pb2.FieldMask):
            Required. The list of fields to update. Field mask is used
            to specify the fields to be overwritten in the BackupPlan
            resource by the update. The fields specified in the
            update_mask are relative to the resource, not the full
            request. A field will be overwritten if it is in the mask.
            If the user does not provide a mask then the request will
            fail. Currently, these fields are supported in update:
            description, schedules, retention period, adding and
            removing Backup Rules.
        request_id (str):
            Optional. An optional request ID to identify
            requests. Specify a unique request ID so that if
            you must retry your request, the server will
            know to ignore the request if it has already
            been completed. The server will guarantee that
            for at least 60 minutes since the first request.

            For example, consider a situation where you make
            an initial request and the request times out.
            If you make the request again with the same
            request ID, the server can check if original
            operation with the same request ID was received,
            and if so, will ignore the second request. This
            prevents clients from accidentally creating
            duplicate commitments.

            The request ID must be a valid UUID with the
            exception that zero UUID is not supported
            (00000000-0000-0000-0000-000000000000).
    """

    backup_plan: "BackupPlan" = proto.Field(
        proto.MESSAGE,
        number=1,
        message="BackupPlan",
    )
    update_mask: field_mask_pb2.FieldMask = proto.Field(
        proto.MESSAGE,
        number=2,
        message=field_mask_pb2.FieldMask,
    )
    request_id: str = proto.Field(
        proto.STRING,
        number=3,
    )


class BackupPlanRevision(proto.Message):
    r"""``BackupPlanRevision`` represents a snapshot of a ``BackupPlan`` at
    a point in time.

    Attributes:
        name (str):
            Output only. Identifier. The resource name of the
            ``BackupPlanRevision``.

            Format:
            ``projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision}``
        revision_id (str):
            Output only. The user friendly revision ID of the
            ``BackupPlanRevision``.

            Example: v0, v1, v2, etc.
        state (google.cloud.backupdr_v1.types.BackupPlanRevision.State):
            Output only. Resource State
        backup_plan_snapshot (google.cloud.backupdr_v1.types.BackupPlan):
            The Backup Plan being encompassed by this
            revision.
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The timestamp that the revision
            was created.
    """

    class State(proto.Enum):
        r"""The state of the ``BackupPlanRevision``.

        Values:
            STATE_UNSPECIFIED (0):
                State not set.
            CREATING (1):
                The resource is being created.
            ACTIVE (2):
                The resource has been created and is fully
                usable.
            DELETING (3):
                The resource is being deleted.
            INACTIVE (4):
                The resource has been created but is not
                usable.
        """
        STATE_UNSPECIFIED = 0
        CREATING = 1
        ACTIVE = 2
        DELETING = 3
        INACTIVE = 4

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    revision_id: str = proto.Field(
        proto.STRING,
        number=2,
    )
    state: State = proto.Field(
        proto.ENUM,
        number=3,
        enum=State,
    )
    backup_plan_snapshot: "BackupPlan" = proto.Field(
        proto.MESSAGE,
        number=4,
        message="BackupPlan",
    )
    create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=5,
        message=timestamp_pb2.Timestamp,
    )


class GetBackupPlanRevisionRequest(proto.Message):
    r"""The request message for getting a ``BackupPlanRevision``.

    Attributes:
        name (str):
            Required. The resource name of the ``BackupPlanRevision`` to
            retrieve.

            Format:
            ``projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision}``
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )


class ListBackupPlanRevisionsRequest(proto.Message):
    r"""The request message for getting a list of ``BackupPlanRevision``.

    Attributes:
        parent (str):
            Required. The project and location for which to retrieve
            ``BackupPlanRevisions`` information. Format:
            ``projects/{project}/locations/{location}/backupPlans/{backup_plan}``.
            In Cloud BackupDR, locations map to GCP regions, for e.g.
            **us-central1**.
        page_size (int):
            Optional. The maximum number of ``BackupPlanRevisions`` to
            return in a single response. If not specified, a default
            value will be chosen by the service. Note that the response
            may include a partial list and a caller should only rely on
            the response's
            [next_page_token][google.cloud.backupdr.v1.ListBackupPlanRevisionsResponse.next_page_token]
            to determine if there are more instances left to be queried.
        page_token (str):
            Optional. The value of
            [next_page_token][google.cloud.backupdr.v1.ListBackupPlanRevisionsResponse.next_page_token]
            received from a previous ``ListBackupPlanRevisions`` call.
            Provide this to retrieve the subsequent page in a multi-page
            list of results. When paginating, all other parameters
            provided to ``ListBackupPlanRevisions`` must match the call
            that provided the page token.
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    page_size: int = proto.Field(
        proto.INT32,
        number=2,
    )
    page_token: str = proto.Field(
        proto.STRING,
        number=3,
    )


class ListBackupPlanRevisionsResponse(proto.Message):
    r"""The response message for getting a list of ``BackupPlanRevision``.

    Attributes:
        backup_plan_revisions (MutableSequence[google.cloud.backupdr_v1.types.BackupPlanRevision]):
            The list of ``BackupPlanRevisions`` in the project for the
            specified location.

            If the ``{location}`` value in the request is "-", the
            response contains a list of resources from all locations. In
            case any location is unreachable, the response will only
            return backup plans in reachable locations and the
            'unreachable' field will be populated with a list of
            unreachable locations.
        next_page_token (str):
            A token which may be sent as
            [page_token][google.cloud.backupdr.v1.ListBackupPlanRevisionsRequest.page_token]
            in a subsequent ``ListBackupPlanRevisions`` call to retrieve
            the next page of results. If this field is omitted or empty,
            then there are no more results to return.
        unreachable (MutableSequence[str]):
            Locations that could not be reached.
    """

    @property
    def raw_page(self):
        return self

    backup_plan_revisions: MutableSequence["BackupPlanRevision"] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message="BackupPlanRevision",
    )
    next_page_token: str = proto.Field(
        proto.STRING,
        number=2,
    )
    unreachable: MutableSequence[str] = proto.RepeatedField(
        proto.STRING,
        number=3,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
The resource name of @@ -52,7 +61,13 @@ class BackupPlanAssociation(proto.Message): workload on which backupplan is applied resource (str): Required. Immutable. Resource name of - workload on which backupplan is applied + workload on which the backup plan is applied. + + The format can either be the resource name + (e.g., + "projects/my-project/zones/us-central1-a/instances/my-instance") + or the full resource URI (e.g., + "https://www.googleapis.com/compute/v1/projects/my-project/zones/us-central1-a/instances/my-instance"). backup_plan (str): Required. Resource name of backup plan which needs to be applied on workload. Format: @@ -76,6 +91,21 @@ class BackupPlanAssociation(proto.Message): backups taken. Format : projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource} + cloud_sql_instance_backup_plan_association_properties (google.cloud.backupdr_v1.types.CloudSqlInstanceBackupPlanAssociationProperties): + Output only. Cloud SQL instance's backup plan + association properties. + + This field is a member of `oneof`_ ``resource_properties``. + backup_plan_revision_id (str): + Output only. The user friendly revision ID of the + ``BackupPlanRevision``. + + Example: v0, v1, v2, etc. + backup_plan_revision_name (str): + Output only. The resource id of the ``BackupPlanRevision``. + + Format: + ``projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision_id}`` """ class State(proto.Enum): @@ -94,12 +124,15 @@ class State(proto.Enum): INACTIVE (4): The resource has been created but is not usable. + UPDATING (5): + The resource is being updated. 
class FetchBackupPlanAssociationsForResourceTypeRequest(proto.Message):
    r"""Request for the FetchBackupPlanAssociationsForResourceType
    method.

    Attributes:
        parent (str):
            Required. The parent resource name.
            Format: projects/{project}/locations/{location}
        resource_type (str):
            Required. The type of the GCP resource.
            Ex: sql.googleapis.com/Instance
        page_size (int):
            Optional. The maximum number of
            BackupPlanAssociations to return. The service
            may return fewer than this value. If
            unspecified, at most 50 BackupPlanAssociations
            will be returned. The maximum value is 100;
            values above 100 will be coerced to 100.
        page_token (str):
            Optional. A page token, received from a previous call of
            ``FetchBackupPlanAssociationsForResourceType``. Provide this
            to retrieve the subsequent page.

            When paginating, all other parameters provided to
            ``FetchBackupPlanAssociationsForResourceType`` must match
            the call that provided the page token.
        filter (str):
            Optional. A filter expression that filters the results
            fetched in the response. The expression must specify the
            field name, a comparison operator, and the value that you
            want to use for filtering. Supported fields:

            -  resource
            -  backup_plan
            -  state
            -  data_source
            -  cloud_sql_instance_backup_plan_association_properties.instance_create_time
        order_by (str):
            Optional. A comma-separated list of fields to order by,
            sorted in ascending order. Use "desc" after a field name for
            descending.

            Supported fields:

            -  name
    """

    parent: str = proto.Field(
        proto.STRING,
        number=1,
    )
    resource_type: str = proto.Field(
        proto.STRING,
        number=2,
    )
    page_size: int = proto.Field(
        proto.INT32,
        number=3,
    )
    page_token: str = proto.Field(
        proto.STRING,
        number=4,
    )
    filter: str = proto.Field(
        proto.STRING,
        number=5,
    )
    order_by: str = proto.Field(
        proto.STRING,
        number=6,
    )


class FetchBackupPlanAssociationsForResourceTypeResponse(proto.Message):
    r"""Response for the FetchBackupPlanAssociationsForResourceType
    method.

    Attributes:
        backup_plan_associations (MutableSequence[google.cloud.backupdr_v1.types.BackupPlanAssociation]):
            Output only. The BackupPlanAssociations from
            the specified parent.
        next_page_token (str):
            Output only. A token, which can be sent as ``page_token`` to
            retrieve the next page. If this field is omitted, there are
            no subsequent pages.
    """

    @property
    def raw_page(self):
        return self

    backup_plan_associations: MutableSequence[
        "BackupPlanAssociation"
    ] = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message="BackupPlanAssociation",
    )
    next_page_token: str = proto.Field(
        proto.STRING,
        number=2,
    )


class UpdateBackupPlanAssociationRequest(proto.Message):
    r"""Request message for updating a backup plan association.

    Attributes:
        backup_plan_association (google.cloud.backupdr_v1.types.BackupPlanAssociation):
            Required. The resource being updated
        update_mask (google.protobuf.field_mask_pb2.FieldMask):
            Required. The list of fields to update. Field mask is used
            to specify the fields to be overwritten in the
            BackupPlanAssociation resource by the update. The fields
            specified in the update_mask are relative to the resource,
            not the full request. A field will be overwritten if it is
            in the mask. If the user does not provide a mask then the
            request will fail. Currently
            backup_plan_association.backup_plan is the only supported
            field.
        request_id (str):
            Optional. An optional request ID to identify
            requests. Specify a unique request ID so that if
            you must retry your request, the server will
            know to ignore the request if it has already
            been completed. The server will guarantee that
            for at least 60 minutes since the first request.

            For example, consider a situation where you make
            an initial request and the request times out.
            If you make the request again with the same
            request ID, the server can check if original
            operation with the same request ID was received,
            and if so, will ignore the second request. This
            prevents clients from accidentally creating
            duplicate commitments.

            The request ID must be a valid UUID with the
            exception that zero UUID is not supported
            (00000000-0000-0000-0000-000000000000).
    """

    backup_plan_association: "BackupPlanAssociation" = proto.Field(
        proto.MESSAGE,
        number=1,
        message="BackupPlanAssociation",
    )
    update_mask: field_mask_pb2.FieldMask = proto.Field(
        proto.MESSAGE,
        number=2,
        message=field_mask_pb2.FieldMask,
    )
    request_id: str = proto.Field(
        proto.STRING,
        number=3,
    )
diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py index e7695b6269c2..1798a7392812 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault.py @@ -23,7 +23,12 @@ from google.rpc import status_pb2 # type: ignore import proto # type: ignore -from google.cloud.backupdr_v1.types import backupvault_ba, backupvault_gce +from google.cloud.backupdr_v1.types import ( + backupvault_ba, + backupvault_cloudsql, + backupvault_disk, + backupvault_gce, +) __protobuf__ = proto.module( package="google.cloud.backupdr.v1", @@ -229,12 +234,15 @@ class State(proto.Enum): ERROR (4): The backup vault is experiencing an issue and might be unusable. + UPDATING (5): + The backup vault is being updated. """ STATE_UNSPECIFIED = 0 CREATING = 1 ACTIVE = 2 DELETING = 3 ERROR = 4 + UPDATING = 5 class AccessRestriction(proto.Enum): r"""Holds the access restriction for the backup vault. @@ -413,6 +421,9 @@ class DataSource(proto.Message): application. This field is a member of `oneof`_ ``source_resource``. + backup_blocked_by_vault_access_restriction (bool): + Output only. This field is set to true if the + backup is blocked by vault access restriction. """ class State(proto.Enum): @@ -503,6 +514,10 @@ class State(proto.Enum): message="DataSourceBackupApplianceApplication", ) ) + backup_blocked_by_vault_access_restriction: bool = proto.Field( + proto.BOOL, + number=28, + ) class BackupConfigInfo(proto.Message): @@ -606,6 +621,11 @@ class GcpBackupConfig(proto.Message): backup_plan_rules (MutableSequence[str]): The names of the backup plan rules which point to this backupvault + backup_plan_revision_name (str): + The name of the backup plan revision. + backup_plan_revision_id (str): + The user friendly id of the backup plan + revision. E.g. v0, v1 etc. 
""" backup_plan: str = proto.Field( @@ -624,6 +644,14 @@ class GcpBackupConfig(proto.Message): proto.STRING, number=4, ) + backup_plan_revision_name: str = proto.Field( + proto.STRING, + number=5, + ) + backup_plan_revision_id: str = proto.Field( + proto.STRING, + number=6, + ) class BackupApplianceBackupConfig(proto.Message): @@ -685,6 +713,10 @@ class DataSourceGcpResource(proto.Message): are Google Cloud Resources. This name is easeier to understand than GcpResourceDataSource or GcpDataSourceResource + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -704,6 +736,19 @@ class DataSourceGcpResource(proto.Message): subset of Compute Instance properties that are useful at the Datasource level. + This field is a member of `oneof`_ ``gcp_resource_properties``. + cloud_sql_instance_datasource_properties (google.cloud.backupdr_v1.types.CloudSqlInstanceDataSourceProperties): + Output only. + CloudSqlInstanceDataSourceProperties has a + subset of Cloud SQL Instance properties that are + useful at the Datasource level. + + This field is a member of `oneof`_ ``gcp_resource_properties``. + disk_datasource_properties (google.cloud.backupdr_v1.types.DiskDataSourceProperties): + DiskDataSourceProperties has a subset of Disk + properties that are useful at the Datasource + level. + This field is a member of `oneof`_ ``gcp_resource_properties``. 
""" @@ -725,6 +770,18 @@ class DataSourceGcpResource(proto.Message): oneof="gcp_resource_properties", message=backupvault_gce.ComputeInstanceDataSourceProperties, ) + cloud_sql_instance_datasource_properties: backupvault_cloudsql.CloudSqlInstanceDataSourceProperties = proto.Field( + proto.MESSAGE, + number=5, + oneof="gcp_resource_properties", + message=backupvault_cloudsql.CloudSqlInstanceDataSourceProperties, + ) + disk_datasource_properties: backupvault_disk.DiskDataSourceProperties = proto.Field( + proto.MESSAGE, + number=7, + oneof="gcp_resource_properties", + message=backupvault_disk.DiskDataSourceProperties, + ) class DataSourceBackupApplianceApplication(proto.Message): @@ -985,11 +1042,20 @@ class Backup(proto.Message): Output only. Compute Engine specific backup properties. + This field is a member of `oneof`_ ``backup_properties``. + cloud_sql_instance_backup_properties (google.cloud.backupdr_v1.types.CloudSqlInstanceBackupProperties): + Output only. Cloud SQL specific backup + properties. + This field is a member of `oneof`_ ``backup_properties``. backup_appliance_backup_properties (google.cloud.backupdr_v1.types.BackupApplianceBackupProperties): Output only. Backup Appliance specific backup properties. + This field is a member of `oneof`_ ``backup_properties``. + disk_backup_properties (google.cloud.backupdr_v1.types.DiskBackupProperties): + Output only. Disk specific backup properties. + This field is a member of `oneof`_ ``backup_properties``. backup_type (google.cloud.backupdr_v1.types.Backup.BackupType): Output only. Type of the backup, unspecified, @@ -1002,6 +1068,16 @@ class Backup(proto.Message): resource_size_bytes (int): Output only. source resource size in bytes at the time of the backup. + satisfies_pzs (bool): + Optional. Output only. Reserved for future + use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. + satisfies_pzi (bool): + Optional. Output only. Reserved for future + use. 
+ + This field is a member of `oneof`_ ``_satisfies_pzi``. """ class State(proto.Enum): @@ -1020,12 +1096,15 @@ class State(proto.Enum): ERROR (4): The backup is experiencing an issue and might be unusable. + UPLOADING (5): + The backup is being uploaded. """ STATE_UNSPECIFIED = 0 CREATING = 1 ACTIVE = 2 DELETING = 3 ERROR = 4 + UPLOADING = 5 class BackupType(proto.Enum): r"""Type of the backup, scheduled or ondemand. @@ -1037,10 +1116,13 @@ class BackupType(proto.Enum): Scheduled backup. ON_DEMAND (2): On demand backup. + ON_DEMAND_OPERATIONAL (3): + Operational backup. """ BACKUP_TYPE_UNSPECIFIED = 0 SCHEDULED = 1 ON_DEMAND = 2 + ON_DEMAND_OPERATIONAL = 3 class GCPBackupPlanInfo(proto.Message): r"""GCPBackupPlanInfo captures the plan configuration details of @@ -1057,6 +1139,16 @@ class GCPBackupPlanInfo(proto.Message): The rule id of the backup plan which triggered this backup in case of scheduled backup or used for + backup_plan_revision_name (str): + Resource name of the backup plan revision + which triggered this backup in case of scheduled + backup or used for on demand backup. Format: + + projects/{project}/locations/{location}/backupPlans/{backupPlanId}/revisions/{revisionId} + backup_plan_revision_id (str): + The user friendly id of the backup plan + revision which triggered this backup in case of + scheduled backup or used for on demand backup. 
""" backup_plan: str = proto.Field( @@ -1067,6 +1159,14 @@ class GCPBackupPlanInfo(proto.Message): proto.STRING, number=2, ) + backup_plan_revision_name: str = proto.Field( + proto.STRING, + number=3, + ) + backup_plan_revision_id: str = proto.Field( + proto.STRING, + number=4, + ) name: str = proto.Field( proto.STRING, @@ -1138,12 +1238,24 @@ class GCPBackupPlanInfo(proto.Message): oneof="backup_properties", message=backupvault_gce.ComputeInstanceBackupProperties, ) + cloud_sql_instance_backup_properties: backupvault_cloudsql.CloudSqlInstanceBackupProperties = proto.Field( + proto.MESSAGE, + number=26, + oneof="backup_properties", + message=backupvault_cloudsql.CloudSqlInstanceBackupProperties, + ) backup_appliance_backup_properties: backupvault_ba.BackupApplianceBackupProperties = proto.Field( proto.MESSAGE, number=21, oneof="backup_properties", message=backupvault_ba.BackupApplianceBackupProperties, ) + disk_backup_properties: backupvault_disk.DiskBackupProperties = proto.Field( + proto.MESSAGE, + number=28, + oneof="backup_properties", + message=backupvault_disk.DiskBackupProperties, + ) backup_type: BackupType = proto.Field( proto.ENUM, number=20, @@ -1159,6 +1271,16 @@ class GCPBackupPlanInfo(proto.Message): proto.INT64, number=23, ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=24, + optional=True, + ) + satisfies_pzi: bool = proto.Field( + proto.BOOL, + number=25, + optional=True, + ) class CreateBackupVaultRequest(proto.Message): @@ -1469,6 +1591,11 @@ class UpdateBackupVaultRequest(proto.Message): Optional. If set to true, will not check plan duration against backup vault enforcement duration. + force_update_access_restriction (bool): + Optional. If set to true, we will force + update access restriction even if some non + compliant data sources are present. The default + is 'false'. 
""" update_mask: field_mask_pb2.FieldMask = proto.Field( @@ -1493,6 +1620,10 @@ class UpdateBackupVaultRequest(proto.Message): proto.BOOL, number=5, ) + force_update_access_restriction: bool = proto.Field( + proto.BOOL, + number=6, + ) class DeleteBackupVaultRequest(proto.Message): @@ -1946,6 +2077,11 @@ class DeleteBackupRequest(proto.Message): class RestoreBackupRequest(proto.Message): r"""Request message for restoring from a Backup. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -1977,11 +2113,26 @@ class RestoreBackupRequest(proto.Message): Compute Engine target environment to be used during restore. + This field is a member of `oneof`_ ``target_environment``. + disk_target_environment (google.cloud.backupdr_v1.types.DiskTargetEnvironment): + Disk target environment to be used during + restore. + + This field is a member of `oneof`_ ``target_environment``. + region_disk_target_environment (google.cloud.backupdr_v1.types.RegionDiskTargetEnvironment): + Region disk target environment to be used + during restore. + This field is a member of `oneof`_ ``target_environment``. compute_instance_restore_properties (google.cloud.backupdr_v1.types.ComputeInstanceRestoreProperties): Compute Engine instance properties to be overridden during restore. + This field is a member of `oneof`_ ``instance_properties``. + disk_restore_properties (google.cloud.backupdr_v1.types.DiskRestoreProperties): + Disk properties to be overridden during + restore. + This field is a member of `oneof`_ ``instance_properties``. 
""" @@ -1999,12 +2150,32 @@ class RestoreBackupRequest(proto.Message): oneof="target_environment", message=backupvault_gce.ComputeInstanceTargetEnvironment, ) + disk_target_environment: backupvault_disk.DiskTargetEnvironment = proto.Field( + proto.MESSAGE, + number=5, + oneof="target_environment", + message=backupvault_disk.DiskTargetEnvironment, + ) + region_disk_target_environment: backupvault_disk.RegionDiskTargetEnvironment = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="target_environment", + message=backupvault_disk.RegionDiskTargetEnvironment, + ) + ) compute_instance_restore_properties: backupvault_gce.ComputeInstanceRestoreProperties = proto.Field( proto.MESSAGE, number=4, oneof="instance_properties", message=backupvault_gce.ComputeInstanceRestoreProperties, ) + disk_restore_properties: backupvault_disk.DiskRestoreProperties = proto.Field( + proto.MESSAGE, + number=7, + oneof="instance_properties", + message=backupvault_disk.DiskRestoreProperties, + ) class RestoreBackupResponse(proto.Message): diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_cloudsql.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_cloudsql.py new file mode 100644 index 000000000000..559ff3aacf47 --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_cloudsql.py @@ -0,0 +1,204 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations

from typing import MutableMapping, MutableSequence

from google.protobuf import timestamp_pb2  # type: ignore
import proto  # type: ignore

__protobuf__ = proto.module(
    package="google.cloud.backupdr.v1",
    manifest={
        "CloudSqlInstanceDataSourceProperties",
        "CloudSqlInstanceBackupProperties",
        "CloudSqlInstanceDataSourceReferenceProperties",
        "CloudSqlInstanceInitializationConfig",
        "CloudSqlInstanceBackupPlanAssociationProperties",
    },
)


class CloudSqlInstanceDataSourceProperties(proto.Message):
    r"""CloudSqlInstanceDataSourceProperties represents the
    properties of a Cloud SQL resource that are stored in the
    DataSource.

    Attributes:
        name (str):
            Output only. Name of the Cloud SQL instance
            backed up by the datasource. Format:

            projects/{project}/instances/{instance}
        database_installed_version (str):
            Output only. The installed database version
            of the Cloud SQL instance.
        instance_create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The instance creation timestamp.
        instance_tier (str):
            Output only. The tier (or machine type) for this instance.
            Example: ``db-custom-1-3840``
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    database_installed_version: str = proto.Field(
        proto.STRING,
        number=2,
    )
    instance_create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    # NOTE: field number 4 is intentionally absent here; the proto reserves it.
    instance_tier: str = proto.Field(
        proto.STRING,
        number=5,
    )


class CloudSqlInstanceBackupProperties(proto.Message):
    r"""CloudSqlInstanceBackupProperties represents Cloud SQL
    Instance Backup properties.

    Attributes:
        database_installed_version (str):
            Output only. The installed database version
            of the Cloud SQL instance when the backup was
            taken.
        final_backup (bool):
            Output only. Whether the backup is a final
            backup.
        source_instance (str):
            Output only. The source instance of the
            backup. Format:

            projects/{project}/instances/{instance}
        instance_tier (str):
            Output only. The tier (or machine type) for this instance.
            Example: ``db-custom-1-3840``
    """

    database_installed_version: str = proto.Field(
        proto.STRING,
        number=2,
    )
    final_backup: bool = proto.Field(
        proto.BOOL,
        number=3,
    )
    source_instance: str = proto.Field(
        proto.STRING,
        number=4,
    )
    instance_tier: str = proto.Field(
        proto.STRING,
        number=6,
    )


class CloudSqlInstanceDataSourceReferenceProperties(proto.Message):
    r"""CloudSqlInstanceDataSourceReferenceProperties represents the
    properties of a Cloud SQL resource that are stored in the
    DataSourceReference.

    Attributes:
        name (str):
            Output only. Name of the Cloud SQL instance
            backed up by the datasource. Format:

            projects/{project}/instances/{instance}
        database_installed_version (str):
            Output only. The installed database version
            of the Cloud SQL instance.
        instance_create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The instance creation timestamp.
        instance_tier (str):
            Output only. The tier (or machine type) for this instance.
            Example: ``db-custom-1-3840``
    """

    name: str = proto.Field(
        proto.STRING,
        number=1,
    )
    database_installed_version: str = proto.Field(
        proto.STRING,
        number=2,
    )
    instance_create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=3,
        message=timestamp_pb2.Timestamp,
    )
    instance_tier: str = proto.Field(
        proto.STRING,
        number=5,
    )


class CloudSqlInstanceInitializationConfig(proto.Message):
    r"""CloudSqlInstanceInitializationConfig contains the
    configuration for initializing a Cloud SQL instance.

    Attributes:
        edition (google.cloud.backupdr_v1.types.CloudSqlInstanceInitializationConfig.Edition):
            Required. The edition of the Cloud SQL
            instance.
    """

    class Edition(proto.Enum):
        r"""The edition of the Cloud SQL instance. For details, see
        https://cloud.google.com/sql/docs/editions-intro.

        Values:
            EDITION_UNSPECIFIED (0):
                Unspecified edition.
            ENTERPRISE (1):
                Enterprise edition.
            ENTERPRISE_PLUS (2):
                Enterprise Plus edition.
        """
        EDITION_UNSPECIFIED = 0
        ENTERPRISE = 1
        ENTERPRISE_PLUS = 2

    edition: Edition = proto.Field(
        proto.ENUM,
        number=1,
        enum=Edition,
    )


class CloudSqlInstanceBackupPlanAssociationProperties(proto.Message):
    r"""Cloud SQL instance's BPA properties.

    Attributes:
        instance_create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time when the instance was
            created.
    """

    instance_create_time: timestamp_pb2.Timestamp = proto.Field(
        proto.MESSAGE,
        number=1,
        message=timestamp_pb2.Timestamp,
    )


__all__ = tuple(sorted(__protobuf__.manifest))
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.backupdr_v1.types import backupvault_gce + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "DiskTargetEnvironment", + "RegionDiskTargetEnvironment", + "DiskRestoreProperties", + "DiskBackupProperties", + "DiskDataSourceProperties", + }, +) + + +class DiskTargetEnvironment(proto.Message): + r"""DiskTargetEnvironment represents the target environment for + the disk. + + Attributes: + project (str): + Required. Target project for the disk. + zone (str): + Required. Target zone for the disk. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RegionDiskTargetEnvironment(proto.Message): + r"""RegionDiskTargetEnvironment represents the target environment + for the disk. + + Attributes: + project (str): + Required. Target project for the disk. + region (str): + Required. Target region for the disk. + replica_zones (MutableSequence[str]): + Required. Target URLs of the replica zones + for the disk. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + region: str = proto.Field( + proto.STRING, + number=2, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DiskRestoreProperties(proto.Message): + r"""DiskRestoreProperties represents the properties of a Disk + restore. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. Name of the disk. + + This field is a member of `oneof`_ ``_name``. + description (str): + Optional. An optional description of this + resource. Provide this property when you create + the resource. + + This field is a member of `oneof`_ ``_description``. + size_gb (int): + Required. The size of the disk in GB. 
+ + This field is a member of `oneof`_ ``_size_gb``. + licenses (MutableSequence[str]): + Optional. A list of publicly available + licenses that are applicable to this backup. + This is applicable if the original image had + licenses attached, e.g. Windows image + guest_os_feature (MutableSequence[google.cloud.backupdr_v1.types.GuestOsFeature]): + Optional. A list of features to enable in the + guest operating system. This is applicable only + for bootable images. + disk_encryption_key (google.cloud.backupdr_v1.types.CustomerEncryptionKey): + Optional. Encrypts the disk using a + customer-supplied encryption key or a + customer-managed encryption key. + + This field is a member of `oneof`_ ``_disk_encryption_key``. + physical_block_size_bytes (int): + Optional. Physical block size of the + persistent disk, in bytes. If not present in a + request, a default value is used. Currently, the + supported size is 4096. + + This field is a member of `oneof`_ ``_physical_block_size_bytes``. + provisioned_iops (int): + Optional. Indicates how many IOPS to + provision for the disk. This sets the number of + I/O operations per second that the disk can + handle. + + This field is a member of `oneof`_ ``_provisioned_iops``. + provisioned_throughput (int): + Optional. Indicates how much throughput to + provision for the disk. This sets the number of + throughput MB per second that the disk can + handle. + + This field is a member of `oneof`_ ``_provisioned_throughput``. + enable_confidential_compute (bool): + Optional. Indicates whether this disk is + using confidential compute mode. Encryption with + a Cloud KMS key is required to enable this + option. + + This field is a member of `oneof`_ ``_enable_confidential_compute``. + storage_pool (str): + Optional. The storage pool in which the new + disk is created. You can provide this as a + partial or full URL to the resource. + + This field is a member of `oneof`_ ``_storage_pool``. 
+ access_mode (google.cloud.backupdr_v1.types.DiskRestoreProperties.AccessMode): + Optional. The access mode of the disk. + + This field is a member of `oneof`_ ``_access_mode``. + architecture (google.cloud.backupdr_v1.types.DiskRestoreProperties.Architecture): + Optional. The architecture of the source disk. Valid values + are ARM64 or X86_64. + + This field is a member of `oneof`_ ``_architecture``. + resource_policy (MutableSequence[str]): + Optional. Resource policies applied to this + disk. + type_ (str): + Required. URL of the disk type resource + describing which disk type to use to create the + disk. + + This field is a member of `oneof`_ ``_type``. + labels (MutableMapping[str, str]): + Optional. Labels to apply to this disk. These + can be modified later using + setLabels method. Label values can + be empty. + resource_manager_tags (MutableMapping[str, str]): + Optional. Resource manager tags to be bound + to the disk. + """ + + class AccessMode(proto.Enum): + r"""The supported access modes of the disk. + + Values: + READ_WRITE_SINGLE (0): + The default AccessMode, means the disk can be + attached to single instance in RW mode. + READ_WRITE_MANY (1): + The AccessMode means the disk can be attached + to multiple instances in RW mode. + READ_ONLY_MANY (2): + The AccessMode means the disk can be attached + to multiple instances in RO mode. + """ + READ_WRITE_SINGLE = 0 + READ_WRITE_MANY = 1 + READ_ONLY_MANY = 2 + + class Architecture(proto.Enum): + r"""Architecture of the source disk. + + Values: + ARCHITECTURE_UNSPECIFIED (0): + Default value. This value is unused. 
+ X86_64 (1): + Disks with architecture X86_64 + ARM64 (2): + Disks with architecture ARM64 + """ + ARCHITECTURE_UNSPECIFIED = 0 + X86_64 = 1 + ARM64 = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + size_gb: int = proto.Field( + proto.INT64, + number=3, + optional=True, + ) + licenses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + guest_os_feature: MutableSequence[ + backupvault_gce.GuestOsFeature + ] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=backupvault_gce.GuestOsFeature, + ) + disk_encryption_key: backupvault_gce.CustomerEncryptionKey = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=backupvault_gce.CustomerEncryptionKey, + ) + physical_block_size_bytes: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) + provisioned_iops: int = proto.Field( + proto.INT64, + number=8, + optional=True, + ) + provisioned_throughput: int = proto.Field( + proto.INT64, + number=9, + optional=True, + ) + enable_confidential_compute: bool = proto.Field( + proto.BOOL, + number=10, + optional=True, + ) + storage_pool: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + access_mode: AccessMode = proto.Field( + proto.ENUM, + number=12, + optional=True, + enum=AccessMode, + ) + architecture: Architecture = proto.Field( + proto.ENUM, + number=14, + optional=True, + enum=Architecture, + ) + resource_policy: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + type_: str = proto.Field( + proto.STRING, + number=16, + optional=True, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=18, + ) + + +class DiskBackupProperties(proto.Message): + r"""DiskBackupProperties represents the 
properties of a Disk + backup. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + A description of the source disk. + + This field is a member of `oneof`_ ``_description``. + licenses (MutableSequence[str]): + A list of publicly available licenses that + are applicable to this backup. This is + applicable if the original image had licenses + attached, e.g. Windows image. + guest_os_feature (MutableSequence[google.cloud.backupdr_v1.types.GuestOsFeature]): + A list of guest OS features that are + applicable to this backup. + architecture (google.cloud.backupdr_v1.types.DiskBackupProperties.Architecture): + The architecture of the source disk. Valid values are ARM64 + or X86_64. + + This field is a member of `oneof`_ ``_architecture``. + type_ (str): + The URL of the type of the disk. + + This field is a member of `oneof`_ ``_type``. + size_gb (int): + Size(in GB) of the source disk. + + This field is a member of `oneof`_ ``_size_gb``. + region (str): + Region and zone are mutually exclusive + fields. The URL of the region of the source + disk. + + This field is a member of `oneof`_ ``_region``. + zone (str): + The URL of the Zone where the source disk. + + This field is a member of `oneof`_ ``_zone``. + replica_zones (MutableSequence[str]): + The URL of the Zones where the source disk + should be replicated. + source_disk (str): + The source disk used to create this backup. + + This field is a member of `oneof`_ ``_source_disk``. + """ + + class Architecture(proto.Enum): + r"""Architecture of the source disk. + + Values: + ARCHITECTURE_UNSPECIFIED (0): + Default value. This value is unused. 
+ X86_64 (1): + Disks with architecture X86_64 + ARM64 (2): + Disks with architecture ARM64 + """ + ARCHITECTURE_UNSPECIFIED = 0 + X86_64 = 1 + ARM64 = 2 + + description: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + licenses: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + guest_os_feature: MutableSequence[ + backupvault_gce.GuestOsFeature + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=backupvault_gce.GuestOsFeature, + ) + architecture: Architecture = proto.Field( + proto.ENUM, + number=4, + optional=True, + enum=Architecture, + ) + type_: str = proto.Field( + proto.STRING, + number=5, + optional=True, + ) + size_gb: int = proto.Field( + proto.INT64, + number=6, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=8, + optional=True, + ) + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=9, + ) + source_disk: str = proto.Field( + proto.STRING, + number=10, + optional=True, + ) + + +class DiskDataSourceProperties(proto.Message): + r"""DiskDataSourceProperties represents the properties of a + Disk resource that are stored in the DataSource. + . + + Attributes: + name (str): + Name of the disk backed up by the datasource. + description (str): + The description of the disk. + type_ (str): + The type of the disk. + size_gb (int): + The size of the disk in GB. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + size_gb: int = proto.Field( + proto.INT64, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py index 04e1b4c1a897..9c0a89e1bc34 100644 --- a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/backupvault_gce.py @@ -291,7 +291,9 @@ class ComputeInstanceRestoreProperties(proto.Message): disks (MutableSequence[google.cloud.backupdr_v1.types.AttachedDisk]): Optional. Array of disks associated with this instance. Persistent disks must be created - before you can assign them. + before you can assign them. Source regional + persistent disks will be restored with default + replica zones if not specified. display_device (google.cloud.backupdr_v1.types.DisplayDevice): Optional. Enables display device for the instance. @@ -343,6 +345,8 @@ class ComputeInstanceRestoreProperties(proto.Message): are configured to interact with other network services, such as connecting to the internet. Multiple interfaces are supported per instance. + Required to restore in different project or + region. network_performance_config (google.cloud.backupdr_v1.types.NetworkPerformanceConfig): Optional. Configure network performance such as egress bandwidth tier. @@ -366,7 +370,8 @@ class ComputeInstanceRestoreProperties(proto.Message): This field is a member of `oneof`_ ``_allocation_affinity``. resource_policies (MutableSequence[str]): Optional. Resource policies applied to this - instance. + instance. By default, no resource policies will + be applied. scheduling (google.cloud.backupdr_v1.types.Scheduling): Optional. 
Sets the scheduling options for this instance. diff --git a/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/datasourcereference.py b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/datasourcereference.py new file mode 100644 index 000000000000..2ac0e8a305db --- /dev/null +++ b/packages/google-cloud-backupdr/google/cloud/backupdr_v1/types/datasourcereference.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.backupdr_v1.types import backupvault, backupvault_cloudsql + +__protobuf__ = proto.module( + package="google.cloud.backupdr.v1", + manifest={ + "DataSourceReference", + "DataSourceBackupConfigInfo", + "DataSourceGcpResourceInfo", + "GetDataSourceReferenceRequest", + "FetchDataSourceReferencesForResourceTypeRequest", + "FetchDataSourceReferencesForResourceTypeResponse", + }, +) + + +class DataSourceReference(proto.Message): + r"""DataSourceReference is a reference to a DataSource resource. + + Attributes: + name (str): + Identifier. The resource name of the DataSourceReference. + Format: + projects/{project}/locations/{location}/dataSourceReferences/{data_source_reference} + data_source (str): + Output only. The resource name of the + DataSource. 
Format: + + projects/{project}/locations/{location}/backupVaults/{backupVault}/dataSources/{dataSource} + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the + DataSourceReference was created. + data_source_backup_config_state (google.cloud.backupdr_v1.types.BackupConfigState): + Output only. The backup configuration state + of the DataSource. + data_source_backup_count (int): + Output only. Number of backups in the + DataSource. + data_source_backup_config_info (google.cloud.backupdr_v1.types.DataSourceBackupConfigInfo): + Output only. Information of backup + configuration on the DataSource. + data_source_gcp_resource_info (google.cloud.backupdr_v1.types.DataSourceGcpResourceInfo): + Output only. The GCP resource that the + DataSource is associated with. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_source: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + data_source_backup_config_state: backupvault.BackupConfigState = proto.Field( + proto.ENUM, + number=4, + enum=backupvault.BackupConfigState, + ) + data_source_backup_count: int = proto.Field( + proto.INT64, + number=5, + ) + data_source_backup_config_info: "DataSourceBackupConfigInfo" = proto.Field( + proto.MESSAGE, + number=6, + message="DataSourceBackupConfigInfo", + ) + data_source_gcp_resource_info: "DataSourceGcpResourceInfo" = proto.Field( + proto.MESSAGE, + number=7, + message="DataSourceGcpResourceInfo", + ) + + +class DataSourceBackupConfigInfo(proto.Message): + r"""Information of backup configuration on the DataSource. + + Attributes: + last_backup_state (google.cloud.backupdr_v1.types.BackupConfigInfo.LastBackupState): + Output only. The status of the last backup in + this DataSource + last_successful_backup_consistency_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
Timestamp of the last successful + backup to this DataSource. + """ + + last_backup_state: backupvault.BackupConfigInfo.LastBackupState = proto.Field( + proto.ENUM, + number=1, + enum=backupvault.BackupConfigInfo.LastBackupState, + ) + last_successful_backup_consistency_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class DataSourceGcpResourceInfo(proto.Message): + r"""The GCP resource that the DataSource is associated with. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + gcp_resourcename (str): + Output only. The resource name of the GCP + resource. Ex: + projects/{project}/zones/{zone}/instances/{instance} + type_ (str): + Output only. The type of the GCP resource. + Ex: compute.googleapis.com/Instance + location (str): + Output only. The location of the GCP + resource. Ex: + //"global"/"unspecified". + cloud_sql_instance_properties (google.cloud.backupdr_v1.types.CloudSqlInstanceDataSourceReferenceProperties): + Output only. The properties of the Cloud SQL + instance. + + This field is a member of `oneof`_ ``resource_properties``. + """ + + gcp_resourcename: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + cloud_sql_instance_properties: backupvault_cloudsql.CloudSqlInstanceDataSourceReferenceProperties = proto.Field( + proto.MESSAGE, + number=4, + oneof="resource_properties", + message=backupvault_cloudsql.CloudSqlInstanceDataSourceReferenceProperties, + ) + + +class GetDataSourceReferenceRequest(proto.Message): + r"""Request for the GetDataSourceReference method. + + Attributes: + name (str): + Required. The name of the DataSourceReference to retrieve. 
+ Format: + projects/{project}/locations/{location}/dataSourceReferences/{data_source_reference} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class FetchDataSourceReferencesForResourceTypeRequest(proto.Message): + r"""Request for the FetchDataSourceReferencesForResourceType + method. + + Attributes: + parent (str): + Required. The parent resource name. + Format: projects/{project}/locations/{location} + resource_type (str): + Required. The type of the GCP resource. + Ex: sql.googleapis.com/Instance + page_size (int): + Optional. The maximum number of + DataSourceReferences to return. The service may + return fewer than this value. If unspecified, at + most 50 DataSourceReferences will be returned. + The maximum value is 100; values above 100 will + be coerced to 100. + page_token (str): + Optional. A page token, received from a previous call of + ``FetchDataSourceReferencesForResourceType``. Provide this + to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``FetchDataSourceReferencesForResourceType`` must match the + call that provided the page token. + filter (str): + Optional. A filter expression that filters the results + fetched in the response. The expression must specify the + field name, a comparison operator, and the value that you + want to use for filtering. Supported fields: + + - data_source + - data_source_gcp_resource_info.gcp_resourcename + - data_source_backup_config_state + - data_source_backup_count + - data_source_backup_config_info.last_backup_state + - data_source_gcp_resource_info.gcp_resourcename + - data_source_gcp_resource_info.type + - data_source_gcp_resource_info.location + - data_source_gcp_resource_info.cloud_sql_instance_properties.instance_create_time + order_by (str): + Optional. A comma-separated list of fields to order by, + sorted in ascending order. Use "desc" after a field name for + descending. 
+ + Supported fields: + + - name + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + order_by: str = proto.Field( + proto.STRING, + number=6, + ) + + +class FetchDataSourceReferencesForResourceTypeResponse(proto.Message): + r"""Response for the FetchDataSourceReferencesForResourceType + method. + + Attributes: + data_source_references (MutableSequence[google.cloud.backupdr_v1.types.DataSourceReference]): + The DataSourceReferences from the specified + parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + data_source_references: MutableSequence[ + "DataSourceReference" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataSourceReference", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py new file mode 100644 index 000000000000..79c337018dc6 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchBackupPlanAssociationsForResourceType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_fetch_backup_plan_associations_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchBackupPlanAssociationsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_backup_plan_associations_for_resource_type(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py new file mode 100644 index 000000000000..5b1b473b59e8 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for FetchBackupPlanAssociationsForResourceType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_fetch_backup_plan_associations_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchBackupPlanAssociationsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_backup_plan_associations_for_resource_type(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py new file mode 100644 index 000000000000..43838143c70c --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py @@ -0,0 +1,54 @@ +# 
-*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchDataSourceReferencesForResourceType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_fetch_data_source_references_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchDataSourceReferencesForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_data_source_references_for_resource_type(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py new file mode 100644 index 000000000000..3ba0f77a07e5 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for FetchDataSourceReferencesForResourceType +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_fetch_data_source_references_for_resource_type(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.FetchDataSourceReferencesForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + ) + + # Make the request + page_result = client.fetch_data_source_references_for_resource_type(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py new file mode 100644 index 000000000000..64e827830c04 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanRevision_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_backup_plan_revision(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRevisionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_backup_plan_revision(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanRevision_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py new file mode 100644 index 000000000000..cf3c058e747e --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBackupPlanRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetBackupPlanRevision_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_backup_plan_revision(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetBackupPlanRevisionRequest( + name="name_value", + ) + + # Make the request + response = client.get_backup_plan_revision(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetBackupPlanRevision_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_async.py new file mode 100644 index 000000000000..47a19246fa69 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSourceReference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSourceReference_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_get_data_source_reference(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceReferenceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_source_reference(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSourceReference_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py new file mode 100644 index 000000000000..8172d71f1a12 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataSourceReference +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_GetDataSourceReference_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_get_data_source_reference(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.GetDataSourceReferenceRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_source_reference(request=request) + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_GetDataSourceReference_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py index 46a478779e53..204a26bb960e 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_async.py @@ -39,7 +39,11 @@ async def sample_initialize_service(): client = backupdr_v1.BackupDRAsyncClient() # Initialize request argument(s) + cloud_sql_instance_initialization_config = backupdr_v1.CloudSqlInstanceInitializationConfig() + cloud_sql_instance_initialization_config.edition = "ENTERPRISE_PLUS" + request = backupdr_v1.InitializeServiceRequest( + 
cloud_sql_instance_initialization_config=cloud_sql_instance_initialization_config, name="name_value", resource_type="resource_type_value", ) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py index 377e3cfb2969..2bb388010b2f 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_initialize_service_sync.py @@ -39,7 +39,11 @@ def sample_initialize_service(): client = backupdr_v1.BackupDRClient() # Initialize request argument(s) + cloud_sql_instance_initialization_config = backupdr_v1.CloudSqlInstanceInitializationConfig() + cloud_sql_instance_initialization_config.edition = "ENTERPRISE_PLUS" + request = backupdr_v1.InitializeServiceRequest( + cloud_sql_instance_initialization_config=cloud_sql_instance_initialization_config, name="name_value", resource_type="resource_type_value", ) diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py new file mode 100644 index 000000000000..4bfa7c4e57ad --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_list_backup_plan_revisions(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_revisions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py new file mode 100644 index 000000000000..61bdb80276bd --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBackupPlanRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_list_backup_plan_revisions(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + request = backupdr_v1.ListBackupPlanRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_backup_plan_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py new file mode 100644 index 000000000000..772f73ee5782 --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.UpdateBackupPlanAssociationRequest( + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.update_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py new file mode 100644 index 000000000000..2d99f9bdbdec --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupPlanAssociation +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup_plan_association(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan_association = backupdr_v1.BackupPlanAssociation() + backup_plan_association.resource_type = "resource_type_value" + backup_plan_association.resource = "resource_value" + backup_plan_association.backup_plan = "backup_plan_value" + + request = backupdr_v1.UpdateBackupPlanAssociationRequest( + backup_plan_association=backup_plan_association, + ) + + # Make the request + operation = client.update_backup_plan_association(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_async.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_async.py new file mode 100644 index 000000000000..9e2997d0929b --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_async.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupPlan_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +async def sample_update_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRAsyncClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.UpdateBackupPlanRequest( + backup_plan=backup_plan, + ) + + # Make the request + operation = client.update_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END 
backupdr_v1_generated_BackupDR_UpdateBackupPlan_async] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_sync.py b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_sync.py new file mode 100644 index 000000000000..0aaa931b12ce --- /dev/null +++ b/packages/google-cloud-backupdr/samples/generated_samples/backupdr_v1_generated_backup_dr_update_backup_plan_sync.py @@ -0,0 +1,66 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBackupPlan +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-backupdr + + +# [START backupdr_v1_generated_BackupDR_UpdateBackupPlan_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import backupdr_v1 + + +def sample_update_backup_plan(): + # Create a client + client = backupdr_v1.BackupDRClient() + + # Initialize request argument(s) + backup_plan = backupdr_v1.BackupPlan() + backup_plan.backup_rules.standard_schedule.recurrence_type = "YEARLY" + backup_plan.backup_rules.standard_schedule.backup_window.start_hour_of_day = 1820 + backup_plan.backup_rules.standard_schedule.backup_window.end_hour_of_day = 1573 + backup_plan.backup_rules.standard_schedule.time_zone = "time_zone_value" + backup_plan.backup_rules.rule_id = "rule_id_value" + backup_plan.backup_rules.backup_retention_days = 2237 + backup_plan.resource_type = "resource_type_value" + backup_plan.backup_vault = "backup_vault_value" + + request = backupdr_v1.UpdateBackupPlanRequest( + backup_plan=backup_plan, + ) + + # Make the request + operation = client.update_backup_plan(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END backupdr_v1_generated_BackupDR_UpdateBackupPlan_sync] diff --git a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json index 1ba516eae2c7..681d1fa60eec 100644 --- a/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json +++ b/packages/google-cloud-backupdr/samples/generated_samples/snippet_metadata_google.cloud.backupdr.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-backupdr", - "version": "0.2.4" + "version": "0.1.0" }, "snippets": [ { @@ -1532,24 +1532,28 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": 
"BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_usable_backup_vaults", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_backup_plan_associations_for_resource_type", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchBackupPlanAssociationsForResourceType", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "FetchUsableBackupVaults" + "shortName": "FetchBackupPlanAssociationsForResourceType" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + "type": "google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeRequest" }, { "name": "parent", "type": "str" }, + { + "name": "resource_type", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1563,22 +1567,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager", - "shortName": "fetch_usable_backup_vaults" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupPlanAssociationsForResourceTypeAsyncPager", + "shortName": "fetch_backup_plan_associations_for_resource_type" }, - "description": "Sample for FetchUsableBackupVaults", - "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py", + "description": "Sample for FetchBackupPlanAssociationsForResourceType", + "file": "backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async", + "regionTag": "backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_async", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, 
+ "end": 53, "start": 27, "type": "SHORT" }, @@ -1588,22 +1592,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py" + "title": "backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_async.py" }, { "canonical": true, @@ -1612,24 +1616,28 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_backup_plan_associations_for_resource_type", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchBackupPlanAssociationsForResourceType", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "FetchUsableBackupVaults" + "shortName": "FetchBackupPlanAssociationsForResourceType" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" + "type": "google.cloud.backupdr_v1.types.FetchBackupPlanAssociationsForResourceTypeRequest" }, { "name": "parent", "type": "str" }, + { + "name": "resource_type", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1643,22 +1651,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager", - "shortName": "fetch_usable_backup_vaults" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchBackupPlanAssociationsForResourceTypePager", + "shortName": 
"fetch_backup_plan_associations_for_resource_type" }, - "description": "Sample for FetchUsableBackupVaults", - "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py", + "description": "Sample for FetchBackupPlanAssociationsForResourceType", + "file": "backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync", + "regionTag": "backupdr_v1_generated_BackupDR_FetchBackupPlanAssociationsForResourceType_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -1668,22 +1676,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py" + "title": "backupdr_v1_generated_backup_dr_fetch_backup_plan_associations_for_resource_type_sync.py" }, { "canonical": true, @@ -1693,22 +1701,26 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_association", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_data_source_references_for_resource_type", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchDataSourceReferencesForResourceType", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackupPlanAssociation" + "shortName": "FetchDataSourceReferencesForResourceType" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + "type": "google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "resource_type", "type": "str" }, { @@ -1724,22 +1736,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", - "shortName": "get_backup_plan_association" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchDataSourceReferencesForResourceTypeAsyncPager", + "shortName": "fetch_data_source_references_for_resource_type" }, - "description": "Sample for GetBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py", + "description": "Sample for FetchDataSourceReferencesForResourceType", + "file": "backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async", + "regionTag": "backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_async", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -1749,22 +1761,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py" + "title": "backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_async.py" }, { "canonical": true, @@ -1773,22 +1785,26 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": 
"google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_data_source_references_for_resource_type", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchDataSourceReferencesForResourceType", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackupPlanAssociation" + "shortName": "FetchDataSourceReferencesForResourceType" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" + "type": "google.cloud.backupdr_v1.types.FetchDataSourceReferencesForResourceTypeRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "resource_type", "type": "str" }, { @@ -1804,22 +1820,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", - "shortName": "get_backup_plan_association" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchDataSourceReferencesForResourceTypePager", + "shortName": "fetch_data_source_references_for_resource_type" }, - "description": "Sample for GetBackupPlanAssociation", - "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py", + "description": "Sample for FetchDataSourceReferencesForResourceType", + "file": "backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync", + "regionTag": "backupdr_v1_generated_BackupDR_FetchDataSourceReferencesForResourceType_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -1829,22 +1845,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py" + "title": "backupdr_v1_generated_backup_dr_fetch_data_source_references_for_resource_type_sync.py" }, { "canonical": true, @@ -1854,22 +1870,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.fetch_usable_backup_vaults", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackupPlan" + "shortName": "FetchUsableBackupVaults" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1885,22 +1901,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlan", - "shortName": "get_backup_plan" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsAsyncPager", + "shortName": "fetch_usable_backup_vaults" }, - "description": "Sample for GetBackupPlan", - "file": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py", + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_async", + "regionTag": 
"backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1920,12 +1936,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py" + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_async.py" }, { "canonical": true, @@ -1934,22 +1950,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.fetch_usable_backup_vaults", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", + "fullName": "google.cloud.backupdr.v1.BackupDR.FetchUsableBackupVaults", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackupPlan" + "shortName": "FetchUsableBackupVaults" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" + "type": "google.cloud.backupdr_v1.types.FetchUsableBackupVaultsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1965,22 +1981,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.BackupPlan", - "shortName": "get_backup_plan" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.FetchUsableBackupVaultsPager", + "shortName": "fetch_usable_backup_vaults" }, - "description": "Sample for GetBackupPlan", - "file": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py", + "description": "Sample for FetchUsableBackupVaults", + "file": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"backupdr_v1_generated_BackupDR_GetBackupPlan_sync", + "regionTag": "backupdr_v1_generated_BackupDR_FetchUsableBackupVaults_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2000,12 +2016,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py" + "title": "backupdr_v1_generated_backup_dr_fetch_usable_backup_vaults_sync.py" }, { "canonical": true, @@ -2015,19 +2031,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_vault", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_association", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackupVault" + "shortName": "GetBackupPlanAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" }, { "name": "name", @@ -2046,14 +2062,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.BackupVault", - "shortName": "get_backup_vault" + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" }, - "description": "Sample for GetBackupVault", - "file": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py", + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"backupdr_v1_generated_BackupDR_GetBackupVault_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_async", "segments": [ { "end": 51, @@ -2086,7 +2102,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_async.py" }, { "canonical": true, @@ -2095,19 +2111,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_association", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanAssociation", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackupVault" + "shortName": "GetBackupPlanAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupPlanAssociationRequest" }, { "name": "name", @@ -2126,14 +2142,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.BackupVault", - "shortName": "get_backup_vault" + "resultType": "google.cloud.backupdr_v1.types.BackupPlanAssociation", + "shortName": "get_backup_plan_association" }, - "description": "Sample for GetBackupVault", - "file": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py", + "description": "Sample for GetBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanAssociation_sync", "segments": [ { "end": 51, @@ -2166,7 +2182,7 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_association_sync.py" }, { "canonical": true, @@ -2176,19 +2192,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan_revision", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanRevision", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackup" + "shortName": "GetBackupPlanRevision" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRevisionRequest" }, { "name": "name", @@ -2207,14 +2223,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.Backup", - "shortName": "get_backup" + "resultType": "google.cloud.backupdr_v1.types.BackupPlanRevision", + "shortName": "get_backup_plan_revision" }, - "description": "Sample for GetBackup", - "file": "backupdr_v1_generated_backup_dr_get_backup_async.py", + "description": "Sample for GetBackupPlanRevision", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanRevision_async", "segments": [ { "end": 51, @@ -2247,7 +2263,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_async.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_revision_async.py" }, { "canonical": true, @@ -2256,19 +2272,19 @@ "fullName": 
"google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan_revision", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlanRevision", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetBackup" + "shortName": "GetBackupPlanRevision" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRevisionRequest" }, { "name": "name", @@ -2287,14 +2303,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.Backup", - "shortName": "get_backup" + "resultType": "google.cloud.backupdr_v1.types.BackupPlanRevision", + "shortName": "get_backup_plan_revision" }, - "description": "Sample for GetBackup", - "file": "backupdr_v1_generated_backup_dr_get_backup_sync.py", + "description": "Sample for GetBackupPlanRevision", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlanRevision_sync", "segments": [ { "end": 51, @@ -2327,7 +2343,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_backup_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_revision_sync.py" }, { "canonical": true, @@ -2337,19 +2353,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_plan", "method": { - "fullName": 
"google.cloud.backupdr.v1.BackupDR.GetDataSource", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetDataSource" + "shortName": "GetBackupPlan" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" }, { "name": "name", @@ -2368,14 +2384,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.DataSource", - "shortName": "get_data_source" + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" }, - "description": "Sample for GetDataSource", - "file": "backupdr_v1_generated_backup_dr_get_data_source_async.py", + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_async", "segments": [ { "end": 51, @@ -2408,7 +2424,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_data_source_async.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_async.py" }, { "canonical": true, @@ -2417,19 +2433,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_plan", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupPlan", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetDataSource" + "shortName": "GetBackupPlan" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.backupdr_v1.types.GetDataSourceRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupPlanRequest" }, { "name": "name", @@ -2448,14 +2464,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.DataSource", - "shortName": "get_data_source" + "resultType": "google.cloud.backupdr_v1.types.BackupPlan", + "shortName": "get_backup_plan" }, - "description": "Sample for GetDataSource", - "file": "backupdr_v1_generated_backup_dr_get_data_source_sync.py", + "description": "Sample for GetBackupPlan", + "file": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupPlan_sync", "segments": [ { "end": 51, @@ -2488,7 +2504,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_data_source_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_plan_sync.py" }, { "canonical": true, @@ -2498,19 +2514,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "GetBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" }, { "name": "name", @@ -2529,14 +2545,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" }, - "description": "Sample for GetManagementServer", - "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_async", "segments": [ { "end": 51, @@ -2569,7 +2585,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_async.py" }, { "canonical": true, @@ -2578,19 +2594,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup_vault", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackupVault", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "GetManagementServer" + "shortName": "GetBackupVault" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupVaultRequest" }, { "name": "name", @@ -2609,14 +2625,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.types.ManagementServer", - "shortName": "get_management_server" + "resultType": "google.cloud.backupdr_v1.types.BackupVault", + "shortName": "get_backup_vault" }, - "description": "Sample for GetManagementServer", - "file": 
"backupdr_v1_generated_backup_dr_get_management_server_sync.py", + "description": "Sample for GetBackupVault", + "file": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackupVault_sync", "segments": [ { "end": 51, @@ -2649,7 +2665,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_vault_sync.py" }, { "canonical": true, @@ -2659,19 +2675,23 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.initialize_service", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "InitializeService" + "shortName": "GetBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2686,22 +2706,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "initialize_service" + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" }, - "description": "Sample for InitializeService", - "file": "backupdr_v1_generated_backup_dr_initialize_service_async.py", + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"backupdr_v1_generated_BackupDR_InitializeService_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_async", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2711,22 +2731,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_initialize_service_async.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_async.py" }, { "canonical": true, @@ -2735,19 +2755,23 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.initialize_service", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_backup", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetBackup", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "InitializeService" + "shortName": "GetBackup" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" + "type": "google.cloud.backupdr_v1.types.GetBackupRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2762,22 +2786,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "initialize_service" + "resultType": "google.cloud.backupdr_v1.types.Backup", + "shortName": "get_backup" }, - "description": "Sample for InitializeService", - "file": "backupdr_v1_generated_backup_dr_initialize_service_sync.py", + "description": "Sample for GetBackup", + "file": "backupdr_v1_generated_backup_dr_get_backup_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_InitializeService_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetBackup_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2787,22 +2811,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_initialize_service_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_backup_sync.py" }, { "canonical": true, @@ -2812,22 +2836,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_associations", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source_reference", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSourceReference", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackupPlanAssociations" + "shortName": "GetDataSourceReference" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + "type": "google.cloud.backupdr_v1.types.GetDataSourceReferenceRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2843,22 +2867,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager", - "shortName": "list_backup_plan_associations" + "resultType": "google.cloud.backupdr_v1.types.DataSourceReference", + 
"shortName": "get_data_source_reference" }, - "description": "Sample for ListBackupPlanAssociations", - "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py", + "description": "Sample for GetDataSourceReference", + "file": "backupdr_v1_generated_backup_dr_get_data_source_reference_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSourceReference_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2878,12 +2902,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py" + "title": "backupdr_v1_generated_backup_dr_get_data_source_reference_async.py" }, { "canonical": true, @@ -2892,22 +2916,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source_reference", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSourceReference", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackupPlanAssociations" + "shortName": "GetDataSourceReference" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" + "type": "google.cloud.backupdr_v1.types.GetDataSourceReferenceRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2923,22 +2947,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager", - "shortName": "list_backup_plan_associations" + "resultType": "google.cloud.backupdr_v1.types.DataSourceReference", + "shortName": "get_data_source_reference" }, - "description": "Sample for ListBackupPlanAssociations", - "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py", + "description": "Sample for GetDataSourceReference", + "file": "backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSourceReference_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2958,12 +2982,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_data_source_reference_sync.py" }, { "canonical": true, @@ -2973,22 +2997,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plans", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackupPlans" + "shortName": "GetDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3004,22 
+3028,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager", - "shortName": "list_backup_plans" + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" }, - "description": "Sample for ListBackupPlans", - "file": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py", + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3039,12 +3063,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py" + "title": "backupdr_v1_generated_backup_dr_get_data_source_async.py" }, { "canonical": true, @@ -3053,22 +3077,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_data_source", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetDataSource", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackupPlans" + "shortName": "GetDataSource" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" + "type": "google.cloud.backupdr_v1.types.GetDataSourceRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3084,22 +3108,22 @@ "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager", - "shortName": "list_backup_plans" + "resultType": "google.cloud.backupdr_v1.types.DataSource", + "shortName": "get_data_source" }, - "description": "Sample for ListBackupPlans", - "file": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py", + "description": "Sample for GetDataSource", + "file": "backupdr_v1_generated_backup_dr_get_data_source_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetDataSource_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3119,12 +3143,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_data_source_sync.py" }, { "canonical": true, @@ -3134,22 +3158,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_vaults", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.get_management_server", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackupVaults" + "shortName": "GetManagementServer" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3165,22 +3189,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], 
- "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager", - "shortName": "list_backup_vaults" + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" }, - "description": "Sample for ListBackupVaults", - "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py", + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_async", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3200,12 +3224,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py" + "title": "backupdr_v1_generated_backup_dr_get_management_server_async.py" }, { "canonical": true, @@ -3214,22 +3238,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.get_management_server", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", + "fullName": "google.cloud.backupdr.v1.BackupDR.GetManagementServer", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackupVaults" + "shortName": "GetManagementServer" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" + "type": "google.cloud.backupdr_v1.types.GetManagementServerRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3245,22 +3269,22 @@ "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager", - "shortName": "list_backup_vaults" + "resultType": "google.cloud.backupdr_v1.types.ManagementServer", + "shortName": "get_management_server" }, - "description": "Sample for ListBackupVaults", - "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py", + "description": "Sample for GetManagementServer", + "file": "backupdr_v1_generated_backup_dr_get_management_server_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_sync", + "regionTag": "backupdr_v1_generated_BackupDR_GetManagementServer_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -3280,12 +3304,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py" + "title": "backupdr_v1_generated_backup_dr_get_management_server_sync.py" }, { "canonical": true, @@ -3295,23 +3319,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backups", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.initialize_service", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackups" + "shortName": "InitializeService" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" }, { "name": "retry", @@ -3326,22 +3346,22 @@ "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager", - "shortName": "list_backups" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "initialize_service" }, - "description": "Sample for ListBackups", - "file": "backupdr_v1_generated_backup_dr_list_backups_async.py", + "description": "Sample for InitializeService", + "file": "backupdr_v1_generated_backup_dr_initialize_service_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_async", + "regionTag": "backupdr_v1_generated_BackupDR_InitializeService_async", "segments": [ { - "end": 52, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 60, "start": 27, "type": "SHORT" }, @@ -3351,22 +3371,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backups_async.py" + "title": "backupdr_v1_generated_backup_dr_initialize_service_async.py" }, { "canonical": true, @@ -3375,23 +3395,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backups", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.initialize_service", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "fullName": "google.cloud.backupdr.v1.BackupDR.InitializeService", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListBackups" + "shortName": "InitializeService" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" - }, - { - "name": "parent", - 
"type": "str" + "type": "google.cloud.backupdr_v1.types.InitializeServiceRequest" }, { "name": "retry", @@ -3406,22 +3422,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager", - "shortName": "list_backups" + "resultType": "google.api_core.operation.Operation", + "shortName": "initialize_service" }, - "description": "Sample for ListBackups", - "file": "backupdr_v1_generated_backup_dr_list_backups_sync.py", + "description": "Sample for InitializeService", + "file": "backupdr_v1_generated_backup_dr_initialize_service_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_sync", + "regionTag": "backupdr_v1_generated_BackupDR_InitializeService_sync", "segments": [ { - "end": 52, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 60, "start": 27, "type": "SHORT" }, @@ -3431,22 +3447,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" + "title": "backupdr_v1_generated_backup_dr_initialize_service_sync.py" }, { "canonical": true, @@ -3456,19 +3472,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_sources", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_associations", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListDataSources" + 
"shortName": "ListBackupPlanAssociations" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" }, { "name": "parent", @@ -3487,14 +3503,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager", - "shortName": "list_data_sources" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsAsyncPager", + "shortName": "list_backup_plan_associations" }, - "description": "Sample for ListDataSources", - "file": "backupdr_v1_generated_backup_dr_list_data_sources_async.py", + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_async", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_async", "segments": [ { "end": 52, @@ -3527,7 +3543,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_data_sources_async.py" + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_async.py" }, { "canonical": true, @@ -3536,19 +3552,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_data_sources", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_associations", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanAssociations", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListDataSources" + "shortName": "ListBackupPlanAssociations" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.backupdr_v1.types.ListDataSourcesRequest" + "type": "google.cloud.backupdr_v1.types.ListBackupPlanAssociationsRequest" }, { "name": "parent", @@ -3567,14 +3583,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager", - "shortName": "list_data_sources" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanAssociationsPager", + "shortName": "list_backup_plan_associations" }, - "description": "Sample for ListDataSources", - "file": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py", + "description": "Sample for ListBackupPlanAssociations", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_sync", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanAssociations_sync", "segments": [ { "end": 52, @@ -3607,7 +3623,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py" + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_associations_sync.py" }, { "canonical": true, @@ -3617,19 +3633,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plan_revisions", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanRevisions", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "ListBackupPlanRevisions" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": 
"google.cloud.backupdr_v1.types.ListBackupPlanRevisionsRequest" }, { "name": "parent", @@ -3648,14 +3664,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", - "shortName": "list_management_servers" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanRevisionsAsyncPager", + "shortName": "list_backup_plan_revisions" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "description": "Sample for ListBackupPlanRevisions", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_async", "segments": [ { "end": 52, @@ -3688,7 +3704,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_revisions_async.py" }, { "canonical": true, @@ -3697,19 +3713,19 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plan_revisions", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlanRevisions", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "ListManagementServers" + "shortName": "ListBackupPlanRevisions" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + "type": "google.cloud.backupdr_v1.types.ListBackupPlanRevisionsRequest" }, { 
"name": "parent", @@ -3728,14 +3744,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", - "shortName": "list_management_servers" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlanRevisionsPager", + "shortName": "list_backup_plan_revisions" }, - "description": "Sample for ListManagementServers", - "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "description": "Sample for ListBackupPlanRevisions", + "file": "backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlanRevisions_sync", "segments": [ { "end": 52, @@ -3768,7 +3784,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + "title": "backupdr_v1_generated_backup_dr_list_backup_plan_revisions_sync.py" }, { "canonical": true, @@ -3778,22 +3794,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.restore_backup", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_plans", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "RestoreBackup" + "shortName": "ListBackupPlans" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3809,22 +3825,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], 
- "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restore_backup" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansAsyncPager", + "shortName": "list_backup_plans" }, - "description": "Sample for RestoreBackup", - "file": "backupdr_v1_generated_backup_dr_restore_backup_async.py", + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_async", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_async", "segments": [ { - "end": 60, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3834,22 +3850,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_restore_backup_async.py" + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_async.py" }, { "canonical": true, @@ -3858,22 +3874,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.restore_backup", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_plans", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupPlans", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "RestoreBackup" + "shortName": "ListBackupPlans" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + "type": "google.cloud.backupdr_v1.types.ListBackupPlansRequest" }, { 
- "name": "name", + "name": "parent", "type": "str" }, { @@ -3889,22 +3905,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restore_backup" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupPlansPager", + "shortName": "list_backup_plans" }, - "description": "Sample for RestoreBackup", - "file": "backupdr_v1_generated_backup_dr_restore_backup_sync.py", + "description": "Sample for ListBackupPlans", + "file": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_sync", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupPlans_sync", "segments": [ { - "end": 60, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 60, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3914,22 +3930,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 57, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 61, - "start": 58, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_restore_backup_sync.py" + "title": "backupdr_v1_generated_backup_dr_list_backup_plans_sync.py" }, { "canonical": true, @@ -3939,26 +3955,22 @@ "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", "shortName": "BackupDRAsyncClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.trigger_backup", + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backup_vaults", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackupVaults", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "TriggerBackup" + "shortName": "ListBackupVaults" }, "parameters": [ { "name": 
"request", - "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" }, { - "name": "rule_id", + "name": "parent", "type": "str" }, { @@ -3974,22 +3986,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "trigger_backup" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsAsyncPager", + "shortName": "list_backup_vaults" }, - "description": "Sample for TriggerBackup", - "file": "backupdr_v1_generated_backup_dr_trigger_backup_async.py", + "description": "Sample for ListBackupVaults", + "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_async", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_async", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3999,22 +4011,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_trigger_backup_async.py" + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_async.py" }, { "canonical": true, @@ -4023,26 +4035,103 @@ "fullName": "google.cloud.backupdr_v1.BackupDRClient", "shortName": "BackupDRClient" }, - "fullName": "google.cloud.backupdr_v1.BackupDRClient.trigger_backup", + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backup_vaults", "method": { - "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "fullName": 
"google.cloud.backupdr.v1.BackupDR.ListBackupVaults", "service": { "fullName": "google.cloud.backupdr.v1.BackupDR", "shortName": "BackupDR" }, - "shortName": "TriggerBackup" + "shortName": "ListBackupVaults" }, "parameters": [ { "name": "request", - "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" + "type": "google.cloud.backupdr_v1.types.ListBackupVaultsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "rule_id", + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupVaultsPager", + "shortName": "list_backup_vaults" + }, + "description": "Sample for ListBackupVaults", + "file": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackupVaults_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backup_vaults_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", "type": "str" }, { @@ -4058,22 +4147,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "trigger_backup" + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsAsyncPager", + "shortName": "list_backups" }, - "description": "Sample for TriggerBackup", - "file": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py", + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_sync", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_async", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -4083,22 +4172,1092 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, { "end": 53, - "start": 47, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_backups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_backups", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListBackups", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListBackups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListBackupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListBackupsPager", + "shortName": "list_backups" + }, + "description": "Sample for ListBackups", + "file": "backupdr_v1_generated_backup_dr_list_backups_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListBackups_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py" + "title": "backupdr_v1_generated_backup_dr_list_backups_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesAsyncPager", + "shortName": 
"list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_data_sources", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListDataSources", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListDataSources" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListDataSourcesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListDataSourcesPager", + "shortName": "list_data_sources" + }, + "description": "Sample for ListDataSources", + "file": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListDataSources_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + 
}, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_data_sources_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersAsyncPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.list_management_servers", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.ListManagementServers", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "ListManagementServers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.ListManagementServersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.backupdr_v1.services.backup_dr.pagers.ListManagementServersPager", + "shortName": "list_management_servers" + }, + "description": "Sample for ListManagementServers", + "file": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_ListManagementServers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_list_management_servers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "RestoreBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.restore_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.RestoreBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "RestoreBackup" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.RestoreBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restore_backup" + }, + "description": "Sample for RestoreBackup", + "file": "backupdr_v1_generated_backup_dr_restore_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_RestoreBackup_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_restore_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.trigger_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "TriggerBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "rule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "trigger_backup" + }, + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_TriggerBackup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_trigger_backup_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.trigger_backup", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.TriggerBackup", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "TriggerBackup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.TriggerBackupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "rule_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "trigger_backup" + }, + "description": "Sample for TriggerBackup", + "file": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"backupdr_v1_generated_BackupDR_TriggerBackup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_trigger_backup_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "UpdateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.UpdateBackupPlanAssociationRequest" + }, + { + "name": "backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup_plan_association" + }, + "description": "Sample for UpdateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_async", + "segments": [ + { + 
"end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_update_backup_plan_association_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup_plan_association", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupPlanAssociation", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "UpdateBackupPlanAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.UpdateBackupPlanAssociationRequest" + }, + { + "name": "backup_plan_association", + "type": "google.cloud.backupdr_v1.types.BackupPlanAssociation" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup_plan_association" + }, + "description": "Sample for UpdateBackupPlanAssociation", + "file": "backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupPlanAssociation_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_update_backup_plan_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient", + "shortName": "BackupDRAsyncClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRAsyncClient.update_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "UpdateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.UpdateBackupPlanRequest" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_backup_plan" + }, + "description": "Sample for UpdateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_update_backup_plan_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupPlan_async", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_update_backup_plan_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.backupdr_v1.BackupDRClient", + "shortName": "BackupDRClient" + }, + "fullName": "google.cloud.backupdr_v1.BackupDRClient.update_backup_plan", + "method": { + "fullName": "google.cloud.backupdr.v1.BackupDR.UpdateBackupPlan", + "service": { + "fullName": "google.cloud.backupdr.v1.BackupDR", + "shortName": "BackupDR" + }, + "shortName": "UpdateBackupPlan" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.backupdr_v1.types.UpdateBackupPlanRequest" + }, + { + "name": "backup_plan", + "type": "google.cloud.backupdr_v1.types.BackupPlan" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_backup_plan" + }, + "description": "Sample for UpdateBackupPlan", + "file": "backupdr_v1_generated_backup_dr_update_backup_plan_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "backupdr_v1_generated_BackupDR_UpdateBackupPlan_sync", + "segments": [ + { + "end": 65, + "start": 27, + "type": "FULL" + }, + { + "end": 65, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 66, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "backupdr_v1_generated_backup_dr_update_backup_plan_sync.py" }, { "canonical": true, diff --git 
a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py index 45379ccb92aa..7ccbfa21317b 100644 --- a/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py +++ b/packages/google-cloud-backupdr/scripts/fixup_backupdr_v1_keywords.py @@ -48,24 +48,31 @@ class backupdrCallTransformer(cst.CSTTransformer): 'delete_backup_plan_association': ('name', 'request_id', ), 'delete_backup_vault': ('name', 'request_id', 'force', 'etag', 'validate_only', 'allow_missing', 'ignore_backup_plan_references', ), 'delete_management_server': ('name', 'request_id', ), + 'fetch_backup_plan_associations_for_resource_type': ('parent', 'resource_type', 'page_size', 'page_token', 'filter', 'order_by', ), + 'fetch_data_source_references_for_resource_type': ('parent', 'resource_type', 'page_size', 'page_token', 'filter', 'order_by', ), 'fetch_usable_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'get_backup': ('name', 'view', ), 'get_backup_plan': ('name', ), 'get_backup_plan_association': ('name', ), + 'get_backup_plan_revision': ('name', ), 'get_backup_vault': ('name', 'view', ), 'get_data_source': ('name', ), + 'get_data_source_reference': ('name', ), 'get_management_server': ('name', ), - 'initialize_service': ('name', 'resource_type', 'request_id', ), + 'initialize_service': ('name', 'resource_type', 'request_id', 'cloud_sql_instance_initialization_config', ), 'list_backup_plan_associations': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_backup_plan_revisions': ('parent', 'page_size', 'page_token', ), 'list_backup_plans': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_backups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), 'list_backup_vaults': ('parent', 'page_size', 'page_token', 'filter', 'order_by', 'view', ), 'list_data_sources': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 
'list_management_servers': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'compute_instance_restore_properties', ), + 'restore_backup': ('name', 'request_id', 'compute_instance_target_environment', 'disk_target_environment', 'region_disk_target_environment', 'compute_instance_restore_properties', 'disk_restore_properties', ), 'trigger_backup': ('name', 'rule_id', 'request_id', ), 'update_backup': ('update_mask', 'backup', 'request_id', ), - 'update_backup_vault': ('update_mask', 'backup_vault', 'request_id', 'validate_only', 'force', ), + 'update_backup_plan': ('backup_plan', 'update_mask', 'request_id', ), + 'update_backup_plan_association': ('backup_plan_association', 'update_mask', 'request_id', ), + 'update_backup_vault': ('update_mask', 'backup_vault', 'request_id', 'validate_only', 'force', 'force_update_access_restriction', ), 'update_data_source': ('update_mask', 'data_source', 'request_id', 'allow_missing', ), } diff --git a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py index 4db3b5db3b89..31dd03eb7a06 100644 --- a/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py +++ b/packages/google-cloud-backupdr/tests/unit/gapic/backupdr_v1/test_backup_dr.py @@ -87,7 +87,10 @@ backupplanassociation, backupvault, backupvault_ba, + backupvault_cloudsql, + backupvault_disk, backupvault_gce, + datasourcereference, ) CRED_INFO_JSON = { @@ -5846,6 +5849,7 @@ def test_get_data_source(request_type, transport: str = "grpc"): etag="etag_value", total_stored_bytes=1946, config_state=backupvault.BackupConfigState.ACTIVE, + backup_blocked_by_vault_access_restriction=True, ) response = client.get_data_source(request) @@ -5863,6 +5867,7 @@ def test_get_data_source(request_type, transport: str = "grpc"): assert response.etag == "etag_value" assert 
response.total_stored_bytes == 1946 assert response.config_state == backupvault.BackupConfigState.ACTIVE + assert response.backup_blocked_by_vault_access_restriction is True def test_get_data_source_non_empty_request_with_auto_populated_field(): @@ -5994,6 +5999,7 @@ async def test_get_data_source_async( etag="etag_value", total_stored_bytes=1946, config_state=backupvault.BackupConfigState.ACTIVE, + backup_blocked_by_vault_access_restriction=True, ) ) response = await client.get_data_source(request) @@ -6012,6 +6018,7 @@ async def test_get_data_source_async( assert response.etag == "etag_value" assert response.total_stored_bytes == 1946 assert response.config_state == backupvault.BackupConfigState.ACTIVE + assert response.backup_blocked_by_vault_access_restriction is True @pytest.mark.asyncio @@ -7067,6 +7074,8 @@ def test_get_backup(request_type, transport: str = "grpc"): state=backupvault.Backup.State.CREATING, backup_type=backupvault.Backup.BackupType.SCHEDULED, resource_size_bytes=2056, + satisfies_pzs=True, + satisfies_pzi=True, ) response = client.get_backup(request) @@ -7084,6 +7093,8 @@ def test_get_backup(request_type, transport: str = "grpc"): assert response.state == backupvault.Backup.State.CREATING assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED assert response.resource_size_bytes == 2056 + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True def test_get_backup_non_empty_request_with_auto_populated_field(): @@ -7213,6 +7224,8 @@ async def test_get_backup_async( state=backupvault.Backup.State.CREATING, backup_type=backupvault.Backup.BackupType.SCHEDULED, resource_size_bytes=2056, + satisfies_pzs=True, + satisfies_pzi=True, ) ) response = await client.get_backup(request) @@ -7231,6 +7244,8 @@ async def test_get_backup_async( assert response.state == backupvault.Backup.State.CREATING assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED assert response.resource_size_bytes == 2056 + assert 
response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.asyncio @@ -8734,11 +8749,11 @@ async def test_create_backup_plan_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupplan.GetBackupPlanRequest, + backupplan.UpdateBackupPlanRequest, dict, ], ) -def test_get_backup_plan(request_type, transport: str = "grpc"): +def test_update_backup_plan(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -8749,37 +8764,24 @@ def test_get_backup_plan(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.BackupPlan( - name="name_value", - description="description_value", - state=backupplan.BackupPlan.State.CREATING, - resource_type="resource_type_value", - etag="etag_value", - backup_vault="backup_vault_value", - backup_vault_service_account="backup_vault_service_account_value", - ) - response = client.get_backup_plan(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, backupplan.BackupPlan) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == backupplan.BackupPlan.State.CREATING - assert response.resource_type == "resource_type_value" - assert response.etag == "etag_value" - assert response.backup_vault == "backup_vault_value" - assert response.backup_vault_service_account == "backup_vault_service_account_value" + assert isinstance(response, future.Future) -def test_get_backup_plan_non_empty_request_with_auto_populated_field(): +def test_update_backup_plan_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -8790,24 +8792,22 @@ def test_get_backup_plan_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplan.GetBackupPlanRequest( - name="name_value", - ) + request = backupplan.UpdateBackupPlanRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_backup_plan(request=request) + client.update_backup_plan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.GetBackupPlanRequest( - name="name_value", - ) + assert args[0] == backupplan.UpdateBackupPlanRequest() -def test_get_backup_plan_use_cached_wrapped_rpc(): +def test_update_backup_plan_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -8821,21 +8821,30 @@ def test_get_backup_plan_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup_plan in client._transport._wrapped_methods + assert ( + client._transport.update_backup_plan in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_backup_plan + ] = mock_rpc request = {} - client.get_backup_plan(request) + client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup_plan(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8843,7 +8852,7 @@ def test_get_backup_plan_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_plan_async_use_cached_wrapped_rpc( +async def test_update_backup_plan_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8860,7 +8869,7 @@ async def test_get_backup_plan_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_backup_plan + client._client._transport.update_backup_plan in client._client._transport._wrapped_methods ) @@ -8868,16 +8877,21 @@ async def test_get_backup_plan_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup_plan + client._client._transport.update_backup_plan ] = mock_rpc request = {} - await client.get_backup_plan(request) + await client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.get_backup_plan(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8885,8 +8899,8 @@ async def test_get_backup_plan_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_backup_plan_async( - transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest +async def test_update_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.UpdateBackupPlanRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -8898,58 +8912,47 @@ async def test_get_backup_plan_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlan( - name="name_value", - description="description_value", - state=backupplan.BackupPlan.State.CREATING, - resource_type="resource_type_value", - etag="etag_value", - backup_vault="backup_vault_value", - backup_vault_service_account="backup_vault_service_account_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.get_backup_plan(request) + response = await client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, backupplan.BackupPlan) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == backupplan.BackupPlan.State.CREATING - assert response.resource_type == "resource_type_value" - assert response.etag == "etag_value" - assert response.backup_vault == "backup_vault_value" - assert response.backup_vault_service_account == "backup_vault_service_account_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_backup_plan_async_from_dict(): - await test_get_backup_plan_async(request_type=dict) +async def test_update_backup_plan_async_from_dict(): + await test_update_backup_plan_async(request_type=dict) -def test_get_backup_plan_field_headers(): +def test_update_backup_plan_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() - request.name = "name_value" + request.backup_plan.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: - call.return_value = backupplan.BackupPlan() - client.get_backup_plan(request) + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -8960,28 +8963,30 @@ def test_get_backup_plan_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_backup_plan_field_headers_async(): +async def test_update_backup_plan_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.GetBackupPlanRequest() + request = backupplan.UpdateBackupPlanRequest() - request.name = "name_value" + request.backup_plan.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlan() + operations_pb2.Operation(name="operations/op") ) - await client.get_backup_plan(request) + await client.update_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -8992,35 +8997,41 @@ async def test_get_backup_plan_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan.name=name_value", ) in kw["metadata"] -def test_get_backup_plan_flattened(): +def test_update_backup_plan_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = backupplan.BackupPlan() + call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_backup_plan( - name="name_value", + client.update_backup_plan( + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_get_backup_plan_flattened_error(): +def test_update_backup_plan_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9028,43 +9039,50 @@ def test_get_backup_plan_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup_plan( - backupplan.GetBackupPlanRequest(), - name="name_value", + client.update_backup_plan( + backupplan.UpdateBackupPlanRequest(), + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_get_backup_plan_flattened_async(): +async def test_update_backup_plan_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: # Designate an appropriate return value for the call. 
- call.return_value = backupplan.BackupPlan() + call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlan() + operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_backup_plan( - name="name_value", + response = await client.update_backup_plan( + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan + mock_val = backupplan.BackupPlan(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_get_backup_plan_flattened_error_async(): +async def test_update_backup_plan_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9072,20 +9090,21 @@ async def test_get_backup_plan_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_backup_plan( - backupplan.GetBackupPlanRequest(), - name="name_value", + await client.update_backup_plan( + backupplan.UpdateBackupPlanRequest(), + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - backupplan.ListBackupPlansRequest, + backupplan.GetBackupPlanRequest, dict, ], ) -def test_list_backup_plans(request_type, transport: str = "grpc"): +def test_get_backup_plan(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9096,29 +9115,45 @@ def test_list_backup_plans(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlansResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + call.return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + log_retention_days=1929, + supported_resource_types=["supported_resource_types_value"], + revision_id="revision_id_value", + revision_name="revision_name_value", ) - response = client.list_backup_plans(request) + response = client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlansPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + assert response.log_retention_days == 1929 + assert response.supported_resource_types == ["supported_resource_types_value"] + assert response.revision_id == "revision_id_value" + assert response.revision_name == "revision_name_value" -def test_list_backup_plans_non_empty_request_with_auto_populated_field(): +def test_get_backup_plan_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -9129,32 +9164,24 @@ def test_list_backup_plans_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplan.ListBackupPlansRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + request = backupplan.GetBackupPlanRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_backup_plans(request=request) + client.get_backup_plan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.ListBackupPlansRequest( - parent="parent_value", - page_token="page_token_value", - filter="filter_value", - order_by="order_by_value", + assert args[0] == backupplan.GetBackupPlanRequest( + name="name_value", ) -def test_list_backup_plans_use_cached_wrapped_rpc(): +def test_get_backup_plan_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -9168,23 +9195,21 @@ def test_list_backup_plans_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backup_plans in client._transport._wrapped_methods + assert client._transport.get_backup_plan in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_backup_plans - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc request = {} - client.list_backup_plans(request) + client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_plans(request) + client.get_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9192,7 +9217,7 @@ def test_list_backup_plans_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_backup_plans_async_use_cached_wrapped_rpc( +async def test_get_backup_plan_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9209,7 +9234,7 @@ async def test_list_backup_plans_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_backup_plans + client._client._transport.get_backup_plan in client._client._transport._wrapped_methods ) @@ -9217,16 +9242,16 @@ async def test_list_backup_plans_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_backup_plans + client._client._transport.get_backup_plan ] = mock_rpc request = {} - await client.list_backup_plans(request) + await client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_backup_plans(request) + await client.get_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9234,8 +9259,8 @@ async def test_list_backup_plans_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_backup_plans_async( - transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest +async def test_get_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.GetBackupPlanRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -9247,52 +9272,66 @@ async def test_list_backup_plans_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlansResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - response = await client.list_backup_plans(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + log_retention_days=1929, + supported_resource_types=["supported_resource_types_value"], + revision_id="revision_id_value", + revision_name="revision_name_value", + ) + ) + response = await client.get_backup_plan(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlansAsyncPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + assert response.log_retention_days == 1929 + assert response.supported_resource_types == ["supported_resource_types_value"] + assert response.revision_id == "revision_id_value" + assert response.revision_name == "revision_name_value" @pytest.mark.asyncio -async def test_list_backup_plans_async_from_dict(): - await test_list_backup_plans_async(request_type=dict) +async def test_get_backup_plan_async_from_dict(): + await test_get_backup_plan_async(request_type=dict) -def test_list_backup_plans_field_headers(): +def test_get_backup_plan_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: - call.return_value = backupplan.ListBackupPlansResponse() - client.list_backup_plans(request) + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = backupplan.BackupPlan() + client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9303,30 +9342,28 @@ def test_list_backup_plans_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_backup_plans_field_headers_async(): +async def test_get_backup_plan_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.ListBackupPlansRequest() + request = backupplan.GetBackupPlanRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlansResponse() + backupplan.BackupPlan() ) - await client.list_backup_plans(request) + await client.get_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -9337,37 +9374,35 @@ async def test_list_backup_plans_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_backup_plans_flattened(): +def test_get_backup_plan_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlansResponse() + call.return_value = backupplan.BackupPlan() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_backup_plans( - parent="parent_value", + client.get_backup_plan( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_backup_plans_flattened_error(): +def test_get_backup_plan_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9375,45 +9410,43 @@ def test_list_backup_plans_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_backup_plans( - backupplan.ListBackupPlansRequest(), - parent="parent_value", + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_list_backup_plans_flattened_async(): +async def test_get_backup_plan_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = backupplan.ListBackupPlansResponse() + call.return_value = backupplan.BackupPlan() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlansResponse() + backupplan.BackupPlan() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_backup_plans( - parent="parent_value", + response = await client.get_backup_plan( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_list_backup_plans_flattened_error_async(): +async def test_get_backup_plan_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9421,296 +9454,103 @@ async def test_list_backup_plans_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.list_backup_plans( - backupplan.ListBackupPlansRequest(), - parent="parent_value", + await client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", ) -def test_list_backup_plans_pager(transport_name: str = "grpc"): +@pytest.mark.parametrize( + "request_type", + [ + backupplan.ListBackupPlansRequest, + dict, + ], +) +def test_list_backup_plans(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_backup_plans), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Designate an appropriate return value for the call. 
+ call.return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + response = client.list_backup_plans(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplan.ListBackupPlansRequest() + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupplan.BackupPlan) for i in results) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_backup_plans_pages(transport_name: str = "grpc"): +def test_list_backup_plans_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_backup_plans), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plans(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplan.ListBackupPlansRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) - pages = list(client.list_backup_plans(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -@pytest.mark.asyncio -async def test_list_backup_plans_async_pager(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_backup_plans( - request={}, - ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, backupplan.BackupPlan) for i in responses) - - -@pytest.mark.asyncio -async def test_list_backup_plans_async_pages(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_backup_plans(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize( - "request_type", - [ - backupplan.DeleteBackupPlanRequest, - dict, - ], -) -def test_delete_backup_plan(request_type, transport: str = "grpc"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup_plan(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - request = backupplan.DeleteBackupPlanRequest() - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - - -def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = backupplan.DeleteBackupPlanRequest( - name="name_value", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_backup_plan(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == backupplan.DeleteBackupPlanRequest( - name="name_value", - ) - - -def test_delete_backup_plan_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) +def test_list_backup_plans_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_backup_plan in client._transport._wrapped_methods - ) + assert client._transport.list_backup_plans in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -9718,20 +9558,15 @@ def test_delete_backup_plan_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_backup_plan + client._transport.list_backup_plans ] = mock_rpc request = {} - client.delete_backup_plan(request) + client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_plan(request) + client.list_backup_plans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9739,7 +9574,7 @@ def test_delete_backup_plan_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_backup_plan_async_use_cached_wrapped_rpc( +async def test_list_backup_plans_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9756,7 +9591,7 @@ async def test_delete_backup_plan_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_backup_plan + client._client._transport.list_backup_plans in client._client._transport._wrapped_methods ) @@ -9764,21 +9599,16 @@ async def test_delete_backup_plan_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_backup_plan + client._client._transport.list_backup_plans ] = mock_rpc request = {} - await client.delete_backup_plan(request) + await client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.delete_backup_plan(request) + await client.list_backup_plans(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -9786,8 +9616,8 @@ async def test_delete_backup_plan_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_backup_plan_async( - transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest +async def test_list_backup_plans_async( + transport: str = "grpc_asyncio", request_type=backupplan.ListBackupPlansRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -9800,46 +9630,51 @@ async def test_delete_backup_plan_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) ) - response = await client.delete_backup_plan(request) + response = await client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplan.DeleteBackupPlanRequest() + request = backupplan.ListBackupPlansRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, pagers.ListBackupPlansAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_delete_backup_plan_async_from_dict(): - await test_delete_backup_plan_async(request_type=dict) +async def test_list_backup_plans_async_from_dict(): + await test_list_backup_plans_async(request_type=dict) -def test_delete_backup_plan_field_headers(): +def test_list_backup_plans_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplan.DeleteBackupPlanRequest() + request = backupplan.ListBackupPlansRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup_plan(request) + call.return_value = backupplan.ListBackupPlansResponse() + client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -9850,30 +9685,30 @@ def test_delete_backup_plan_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_backup_plan_field_headers_async(): +async def test_list_backup_plans_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplan.DeleteBackupPlanRequest() + request = backupplan.ListBackupPlansRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + backupplan.ListBackupPlansResponse() ) - await client.delete_backup_plan(request) + await client.list_backup_plans(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -9884,37 +9719,37 @@ async def test_delete_backup_plan_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_backup_plan_flattened(): +def test_list_backup_plans_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupplan.ListBackupPlansResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_backup_plan( - name="name_value", + client.list_backup_plans( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_delete_backup_plan_flattened_error(): +def test_list_backup_plans_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -9922,45 +9757,45 @@ def test_delete_backup_plan_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup_plan( - backupplan.DeleteBackupPlanRequest(), - name="name_value", + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_delete_backup_plan_flattened_async(): +async def test_list_backup_plans_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" + type(client.transport.list_backup_plans), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = backupplan.ListBackupPlansResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupplan.ListBackupPlansResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_backup_plan( - name="name_value", + response = await client.list_backup_plans( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_backup_plan_flattened_error_async(): +async def test_list_backup_plans_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -9968,20 +9803,222 @@ async def test_delete_backup_plan_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_backup_plan( - backupplan.DeleteBackupPlanRequest(), - name="name_value", + await client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", + ) + + +def test_list_backup_plans_pager(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plans(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) + + +def test_list_backup_plans_pages(transport_name: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + pages = list(client.list_backup_plans(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plans( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in responses) + + +@pytest.mark.asyncio +async def test_list_backup_plans_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plans(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - backupplanassociation.CreateBackupPlanAssociationRequest, + backupplan.DeleteBackupPlanRequest, dict, ], ) -def test_create_backup_plan_association(request_type, transport: str = "grpc"): +def test_delete_backup_plan(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9993,23 +10030,23 @@ def test_create_backup_plan_association(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_backup_plan_association(request) + response = client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.DeleteBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): +def test_delete_backup_plan_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -10020,28 +10057,26 @@ def test_create_backup_plan_association_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.CreateBackupPlanAssociationRequest( - parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", + request = backupplan.DeleteBackupPlanRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_backup_plan_association(request=request) + client.delete_backup_plan(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( - parent="parent_value", - backup_plan_association_id="backup_plan_association_id_value", + assert args[0] == backupplan.DeleteBackupPlanRequest( + name="name_value", ) -def test_create_backup_plan_association_use_cached_wrapped_rpc(): +def test_delete_backup_plan_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10056,8 +10091,7 @@ def test_create_backup_plan_association_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_backup_plan_association - in client._transport._wrapped_methods + client._transport.delete_backup_plan in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -10066,10 +10100,10 @@ def test_create_backup_plan_association_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_backup_plan_association + client._transport.delete_backup_plan ] = mock_rpc request = {} - client.create_backup_plan_association(request) + client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -10079,7 +10113,7 @@ def test_create_backup_plan_association_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_backup_plan_association(request) + client.delete_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10087,7 +10121,7 @@ def test_create_backup_plan_association_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( +async def test_delete_backup_plan_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10104,7 +10138,7 @@ async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_backup_plan_association + client._client._transport.delete_backup_plan in client._client._transport._wrapped_methods ) @@ -10112,11 +10146,11 @@ async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_backup_plan_association + client._client._transport.delete_backup_plan ] = mock_rpc request = {} - await client.create_backup_plan_association(request) + await client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -10126,7 +10160,7 @@ async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.create_backup_plan_association(request) + await client.delete_backup_plan(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10134,9 +10168,8 @@ async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_backup_plan_association_async( - transport: str = "grpc_asyncio", - request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +async def test_delete_backup_plan_async( + transport: str = "grpc_asyncio", request_type=backupplan.DeleteBackupPlanRequest ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -10149,18 +10182,18 @@ async def test_create_backup_plan_association_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.create_backup_plan_association(request) + response = await client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.DeleteBackupPlanRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -10168,27 +10201,27 @@ async def test_create_backup_plan_association_async( @pytest.mark.asyncio -async def test_create_backup_plan_association_async_from_dict(): - await test_create_backup_plan_association_async(request_type=dict) +async def test_delete_backup_plan_async_from_dict(): + await test_delete_backup_plan_async(request_type=dict) -def test_create_backup_plan_association_field_headers(): +def test_delete_backup_plan_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.DeleteBackupPlanRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup_plan_association(request) + client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10199,30 +10232,30 @@ def test_create_backup_plan_association_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_backup_plan_association_field_headers_async(): +async def test_delete_backup_plan_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = backupplan.DeleteBackupPlanRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.create_backup_plan_association(request) + await client.delete_backup_plan(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10233,47 +10266,37 @@ async def test_create_backup_plan_association_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_backup_plan_association_flattened(): +def test_delete_backup_plan_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_backup_plan_association( - parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", + client.delete_backup_plan( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_plan_association - mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") - assert arg == mock_val - arg = args[0].backup_plan_association_id - mock_val = "backup_plan_association_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_backup_plan_association_flattened_error(): +def test_delete_backup_plan_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10281,25 +10304,21 @@ def test_create_backup_plan_association_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), - parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_backup_plan_association_flattened_async(): +async def test_delete_backup_plan_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" + type(client.transport.delete_backup_plan), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -10309,31 +10328,21 @@ async def test_create_backup_plan_association_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_backup_plan_association( - parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", + response = await client.delete_backup_plan( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].backup_plan_association - mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") - assert arg == mock_val - arg = args[0].backup_plan_association_id - mock_val = "backup_plan_association_id_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_backup_plan_association_flattened_error_async(): +async def test_delete_backup_plan_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10341,24 +10350,20 @@ async def test_create_backup_plan_association_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), - parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", + await client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.GetBackupPlanAssociationRequest, + backupplan.GetBackupPlanRevisionRequest, dict, ], ) -def test_get_backup_plan_association(request_type, transport: str = "grpc"): +def test_get_backup_plan_revision(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10370,36 +10375,30 @@ def test_get_backup_plan_association(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation( + call.return_value = backupplan.BackupPlanRevision( name="name_value", - resource_type="resource_type_value", - resource="resource_value", - backup_plan="backup_plan_value", - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source="data_source_value", + revision_id="revision_id_value", + state=backupplan.BackupPlanRevision.State.CREATING, ) - response = client.get_backup_plan_association(request) + response = client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplan.GetBackupPlanRevisionRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert isinstance(response, backupplan.BackupPlanRevision) assert response.name == "name_value" - assert response.resource_type == "resource_type_value" - assert response.resource == "resource_value" - assert response.backup_plan == "backup_plan_value" - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == "data_source_value" + assert response.revision_id == "revision_id_value" + assert response.state == backupplan.BackupPlanRevision.State.CREATING -def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): +def test_get_backup_plan_revision_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -10410,26 +10409,26 @@ def test_get_backup_plan_association_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.GetBackupPlanAssociationRequest( + request = backupplan.GetBackupPlanRevisionRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_backup_plan_association(request=request) + client.get_backup_plan_revision(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( + assert args[0] == backupplan.GetBackupPlanRevisionRequest( name="name_value", ) -def test_get_backup_plan_association_use_cached_wrapped_rpc(): +def test_get_backup_plan_revision_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10444,7 +10443,7 @@ def test_get_backup_plan_association_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_backup_plan_association + client._transport.get_backup_plan_revision in client._transport._wrapped_methods ) @@ -10454,15 +10453,15 @@ def test_get_backup_plan_association_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_backup_plan_association + client._transport.get_backup_plan_revision ] = mock_rpc request = {} - client.get_backup_plan_association(request) + client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_backup_plan_association(request) + client.get_backup_plan_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10470,7 +10469,7 @@ def test_get_backup_plan_association_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( +async def test_get_backup_plan_revision_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10487,7 +10486,7 @@ async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_backup_plan_association + client._client._transport.get_backup_plan_revision in client._client._transport._wrapped_methods ) @@ -10495,16 +10494,16 @@ async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_backup_plan_association + client._client._transport.get_backup_plan_revision ] = mock_rpc request = {} - await client.get_backup_plan_association(request) + await client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_backup_plan_association(request) + await client.get_backup_plan_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10512,9 +10511,9 @@ async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_backup_plan_association_async( +async def test_get_backup_plan_revision_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.GetBackupPlanAssociationRequest, + request_type=backupplan.GetBackupPlanRevisionRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -10527,59 +10526,53 @@ async def test_get_backup_plan_association_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.BackupPlanAssociation( + backupplan.BackupPlanRevision( name="name_value", - resource_type="resource_type_value", - resource="resource_value", - backup_plan="backup_plan_value", - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source="data_source_value", + revision_id="revision_id_value", + state=backupplan.BackupPlanRevision.State.CREATING, ) ) - response = await client.get_backup_plan_association(request) + response = await client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplan.GetBackupPlanRevisionRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert isinstance(response, backupplan.BackupPlanRevision) assert response.name == "name_value" - assert response.resource_type == "resource_type_value" - assert response.resource == "resource_value" - assert response.backup_plan == "backup_plan_value" - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == "data_source_value" + assert response.revision_id == "revision_id_value" + assert response.state == backupplan.BackupPlanRevision.State.CREATING @pytest.mark.asyncio -async def test_get_backup_plan_association_async_from_dict(): - await test_get_backup_plan_association_async(request_type=dict) +async def test_get_backup_plan_revision_async_from_dict(): + await test_get_backup_plan_revision_async(request_type=dict) -def test_get_backup_plan_association_field_headers(): +def test_get_backup_plan_revision_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplan.GetBackupPlanRevisionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: - call.return_value = backupplanassociation.BackupPlanAssociation() - client.get_backup_plan_association(request) + call.return_value = backupplan.BackupPlanRevision() + client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -10595,25 +10588,25 @@ def test_get_backup_plan_association_field_headers(): @pytest.mark.asyncio -async def test_get_backup_plan_association_field_headers_async(): +async def test_get_backup_plan_revision_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplan.GetBackupPlanRevisionRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.BackupPlanAssociation() + backupplan.BackupPlanRevision() ) - await client.get_backup_plan_association(request) + await client.get_backup_plan_revision(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10628,20 +10621,20 @@ async def test_get_backup_plan_association_field_headers_async(): ) in kw["metadata"] -def test_get_backup_plan_association_flattened(): +def test_get_backup_plan_revision_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation() + call.return_value = backupplan.BackupPlanRevision() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_backup_plan_association( + client.get_backup_plan_revision( name="name_value", ) @@ -10654,7 +10647,7 @@ def test_get_backup_plan_association_flattened(): assert arg == mock_val -def test_get_backup_plan_association_flattened_error(): +def test_get_backup_plan_revision_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -10662,31 +10655,31 @@ def test_get_backup_plan_association_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), + client.get_backup_plan_revision( + backupplan.GetBackupPlanRevisionRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_backup_plan_association_flattened_async(): +async def test_get_backup_plan_revision_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" + type(client.transport.get_backup_plan_revision), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.BackupPlanAssociation() + call.return_value = backupplan.BackupPlanRevision() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.BackupPlanAssociation() + backupplan.BackupPlanRevision() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_backup_plan_association( + response = await client.get_backup_plan_revision( name="name_value", ) @@ -10700,7 +10693,7 @@ async def test_get_backup_plan_association_flattened_async(): @pytest.mark.asyncio -async def test_get_backup_plan_association_flattened_error_async(): +async def test_get_backup_plan_revision_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -10708,8 +10701,8 @@ async def test_get_backup_plan_association_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), + await client.get_backup_plan_revision( + backupplan.GetBackupPlanRevisionRequest(), name="name_value", ) @@ -10717,11 +10710,11 @@ async def test_get_backup_plan_association_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - backupplanassociation.ListBackupPlanAssociationsRequest, + backupplan.ListBackupPlanRevisionsRequest, dict, ], ) -def test_list_backup_plan_associations(request_type, transport: str = "grpc"): +def test_list_backup_plan_revisions(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -10733,28 +10726,28 @@ def test_list_backup_plan_associations(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + call.return_value = backupplan.ListBackupPlanRevisionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) - response = client.list_backup_plan_associations(request) + response = client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = backupplan.ListBackupPlanRevisionsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert isinstance(response, pagers.ListBackupPlanRevisionsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] -def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): +def test_list_backup_plan_revisions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -10765,30 +10758,28 @@ def test_list_backup_plan_associations_non_empty_request_with_auto_populated_fie # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.ListBackupPlanAssociationsRequest( + request = backupplan.ListBackupPlanRevisionsRequest( parent="parent_value", page_token="page_token_value", - filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.list_backup_plan_associations(request=request) + client.list_backup_plan_revisions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( + assert args[0] == backupplan.ListBackupPlanRevisionsRequest( parent="parent_value", page_token="page_token_value", - filter="filter_value", ) -def test_list_backup_plan_associations_use_cached_wrapped_rpc(): +def test_list_backup_plan_revisions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -10803,7 +10794,7 @@ def test_list_backup_plan_associations_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_backup_plan_associations + client._transport.list_backup_plan_revisions in client._transport._wrapped_methods ) @@ -10813,15 +10804,15 @@ def test_list_backup_plan_associations_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_plan_associations + client._transport.list_backup_plan_revisions ] = mock_rpc request = {} - client.list_backup_plan_associations(request) + client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_plan_associations(request) + client.list_backup_plan_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10829,7 +10820,7 @@ def test_list_backup_plan_associations_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( +async def test_list_backup_plan_revisions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10846,7 +10837,7 @@ async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_backup_plan_associations + client._client._transport.list_backup_plan_revisions in client._client._transport._wrapped_methods ) @@ -10854,16 +10845,16 @@ async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_backup_plan_associations + client._client._transport.list_backup_plan_revisions ] = mock_rpc request = {} - await client.list_backup_plan_associations(request) + await client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_backup_plan_associations(request) + await client.list_backup_plan_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -10871,9 +10862,9 @@ async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_backup_plan_associations_async( +async def test_list_backup_plan_revisions_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.ListBackupPlanAssociationsRequest, + request_type=backupplan.ListBackupPlanRevisionsRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -10886,51 +10877,51 @@ async def test_list_backup_plan_associations_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.ListBackupPlanAssociationsResponse( + backupplan.ListBackupPlanRevisionsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) - response = await client.list_backup_plan_associations(request) + response = await client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = backupplan.ListBackupPlanRevisionsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert isinstance(response, pagers.ListBackupPlanRevisionsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_from_dict(): - await test_list_backup_plan_associations_async(request_type=dict) +async def test_list_backup_plan_revisions_async_from_dict(): + await test_list_backup_plan_revisions_async(request_type=dict) -def test_list_backup_plan_associations_field_headers(): +def test_list_backup_plan_revisions_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = backupplan.ListBackupPlanRevisionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.list_backup_plan_associations(request) + call.return_value = backupplan.ListBackupPlanRevisionsResponse() + client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -10946,25 +10937,25 @@ def test_list_backup_plan_associations_field_headers(): @pytest.mark.asyncio -async def test_list_backup_plan_associations_field_headers_async(): +async def test_list_backup_plan_revisions_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = backupplan.ListBackupPlanRevisionsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.ListBackupPlanAssociationsResponse() + backupplan.ListBackupPlanRevisionsResponse() ) - await client.list_backup_plan_associations(request) + await client.list_backup_plan_revisions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -10979,20 +10970,20 @@ async def test_list_backup_plan_associations_field_headers_async(): ) in kw["metadata"] -def test_list_backup_plan_associations_flattened(): +def test_list_backup_plan_revisions_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + call.return_value = backupplan.ListBackupPlanRevisionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_backup_plan_associations( + client.list_backup_plan_revisions( parent="parent_value", ) @@ -11005,7 +10996,7 @@ def test_list_backup_plan_associations_flattened(): assert arg == mock_val -def test_list_backup_plan_associations_flattened_error(): +def test_list_backup_plan_revisions_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11013,31 +11004,31 @@ def test_list_backup_plan_associations_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_plan_associations( - backupplanassociation.ListBackupPlanAssociationsRequest(), + client.list_backup_plan_revisions( + backupplan.ListBackupPlanRevisionsRequest(), parent="parent_value", ) @pytest.mark.asyncio -async def test_list_backup_plan_associations_flattened_async(): +async def test_list_backup_plan_revisions_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + call.return_value = backupplan.ListBackupPlanRevisionsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.ListBackupPlanAssociationsResponse() + backupplan.ListBackupPlanRevisionsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_backup_plan_associations( + response = await client.list_backup_plan_revisions( parent="parent_value", ) @@ -11051,7 +11042,7 @@ async def test_list_backup_plan_associations_flattened_async(): @pytest.mark.asyncio -async def test_list_backup_plan_associations_flattened_error_async(): +async def test_list_backup_plan_revisions_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11059,13 +11050,13 @@ async def test_list_backup_plan_associations_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_backup_plan_associations( - backupplanassociation.ListBackupPlanAssociationsRequest(), + await client.list_backup_plan_revisions( + backupplan.ListBackupPlanRevisionsRequest(), parent="parent_value", ) -def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): +def test_list_backup_plan_revisions_pager(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -11073,32 +11064,32 @@ def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Set the response to a series of pages. 
call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], ), RuntimeError, @@ -11110,7 +11101,7 @@ def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_backup_plan_associations( + pager = client.list_backup_plan_revisions( request={}, retry=retry, timeout=timeout ) @@ -11120,12 +11111,10 @@ def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): results = list(pager) assert len(results) == 6 - assert all( - isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results - ) + assert all(isinstance(i, backupplan.BackupPlanRevision) for i in results) -def 
test_list_backup_plan_associations_pages(transport_name: str = "grpc"): +def test_list_backup_plan_revisions_pages(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, @@ -11133,82 +11122,82 @@ def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" + type(client.transport.list_backup_plan_revisions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], ), RuntimeError, ) - pages = list(client.list_backup_plan_associations(request={}).pages) + pages = 
list(client.list_backup_plan_revisions(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_pager(): +async def test_list_backup_plan_revisions_async_pager(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), + type(client.transport.list_backup_plan_revisions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], ), RuntimeError, ) - async_pager = await 
client.list_backup_plan_associations( + async_pager = await client.list_backup_plan_revisions( request={}, ) assert async_pager.next_page_token == "abc" @@ -11217,48 +11206,45 @@ async def test_list_backup_plan_associations_async_pager(): responses.append(response) assert len(responses) == 6 - assert all( - isinstance(i, backupplanassociation.BackupPlanAssociation) - for i in responses - ) + assert all(isinstance(i, backupplan.BackupPlanRevision) for i in responses) @pytest.mark.asyncio -async def test_list_backup_plan_associations_async_pages(): +async def test_list_backup_plan_revisions_async_pages(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_backup_plan_associations), + type(client.transport.list_backup_plan_revisions), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], next_page_token="abc", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], next_page_token="def", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), ], next_page_token="ghi", ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - 
backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), ], ), RuntimeError, @@ -11267,7 +11253,7 @@ async def test_list_backup_plan_associations_async_pages(): # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 async for page_ in ( # pragma: no branch - await client.list_backup_plan_associations(request={}) + await client.list_backup_plan_revisions(request={}) ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): @@ -11277,11 +11263,11 @@ async def test_list_backup_plan_associations_async_pages(): @pytest.mark.parametrize( "request_type", [ - backupplanassociation.DeleteBackupPlanAssociationRequest, + backupplanassociation.CreateBackupPlanAssociationRequest, dict, ], ) -def test_delete_backup_plan_association(request_type, transport: str = "grpc"): +def test_create_backup_plan_association(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11293,23 +11279,23 @@ def test_delete_backup_plan_association(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.delete_backup_plan_association(request) + response = client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) -def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): +def test_create_backup_plan_association_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -11320,26 +11306,28 @@ def test_delete_backup_plan_association_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.DeleteBackupPlanAssociationRequest( - name="name_value", + request = backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.delete_backup_plan_association(request=request) + client.create_backup_plan_association(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( - name="name_value", + assert args[0] == backupplanassociation.CreateBackupPlanAssociationRequest( + parent="parent_value", + backup_plan_association_id="backup_plan_association_id_value", ) -def test_delete_backup_plan_association_use_cached_wrapped_rpc(): +def test_create_backup_plan_association_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11354,7 +11342,7 @@ def test_delete_backup_plan_association_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_backup_plan_association + client._transport.create_backup_plan_association in client._transport._wrapped_methods ) @@ -11364,10 +11352,10 @@ def test_delete_backup_plan_association_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_backup_plan_association + client._transport.create_backup_plan_association ] = mock_rpc request = {} - client.delete_backup_plan_association(request) + client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -11377,7 +11365,7 @@ def test_delete_backup_plan_association_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup_plan_association(request) + client.create_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11385,7 +11373,7 @@ def test_delete_backup_plan_association_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( +async def test_create_backup_plan_association_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11402,7 +11390,7 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.delete_backup_plan_association + client._client._transport.create_backup_plan_association in client._client._transport._wrapped_methods ) @@ -11410,11 +11398,11 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.delete_backup_plan_association + client._client._transport.create_backup_plan_association ] = mock_rpc request = {} - await client.delete_backup_plan_association(request) + await client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -11424,7 +11412,7 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.delete_backup_plan_association(request) + await client.create_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11432,9 +11420,9 @@ async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_delete_backup_plan_association_async( +async def test_create_backup_plan_association_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -11447,18 +11435,18 @@ async def test_delete_backup_plan_association_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.delete_backup_plan_association(request) + response = await client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -11466,27 +11454,27 @@ async def test_delete_backup_plan_association_async( @pytest.mark.asyncio -async def test_delete_backup_plan_association_async_from_dict(): - await test_delete_backup_plan_association_async(request_type=dict) +async def test_create_backup_plan_association_async_from_dict(): + await test_create_backup_plan_association_async(request_type=dict) -def test_delete_backup_plan_association_field_headers(): +def test_create_backup_plan_association_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup_plan_association(request) + client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11497,30 +11485,30 @@ def test_delete_backup_plan_association_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_delete_backup_plan_association_field_headers_async(): +async def test_create_backup_plan_association_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.delete_backup_plan_association(request) + await client.create_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11531,37 +11519,47 @@ async def test_delete_backup_plan_association_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_delete_backup_plan_association_flattened(): +def test_create_backup_plan_association_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_backup_plan_association( - name="name_value", + client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" assert arg == mock_val -def test_delete_backup_plan_association_flattened_error(): +def test_create_backup_plan_association_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11569,21 +11567,25 @@ def test_delete_backup_plan_association_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), - name="name_value", + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", ) @pytest.mark.asyncio -async def test_delete_backup_plan_association_flattened_async(): +async def test_create_backup_plan_association_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" + type(client.transport.create_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -11593,21 +11595,31 @@ async def test_delete_backup_plan_association_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_backup_plan_association( - name="name_value", + response = await client.create_backup_plan_association( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") + assert arg == mock_val + arg = args[0].backup_plan_association_id + mock_val = "backup_plan_association_id_value" assert arg == mock_val @pytest.mark.asyncio -async def test_delete_backup_plan_association_flattened_error_async(): +async def test_create_backup_plan_association_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11615,20 +11627,24 @@ async def test_delete_backup_plan_association_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), - name="name_value", + await client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", ) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.TriggerBackupRequest, + backupplanassociation.UpdateBackupPlanAssociationRequest, dict, ], ) -def test_trigger_backup(request_type, transport: str = "grpc"): +def test_update_backup_plan_association(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11639,22 +11655,24 @@ def test_trigger_backup(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.trigger_backup(request) + response = client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupplanassociation.TriggerBackupRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) -def test_trigger_backup_non_empty_request_with_auto_populated_field(): +def test_update_backup_plan_association_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -11665,26 +11683,22 @@ def test_trigger_backup_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupplanassociation.TriggerBackupRequest( - name="name_value", - rule_id="rule_id_value", - ) + request = backupplanassociation.UpdateBackupPlanAssociationRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.trigger_backup(request=request) + client.update_backup_plan_association(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupplanassociation.TriggerBackupRequest( - name="name_value", - rule_id="rule_id_value", - ) + assert args[0] == backupplanassociation.UpdateBackupPlanAssociationRequest() -def test_trigger_backup_use_cached_wrapped_rpc(): +def test_update_backup_plan_association_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11698,16 +11712,21 @@ def test_trigger_backup_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.trigger_backup in client._transport._wrapped_methods + assert ( + client._transport.update_backup_plan_association + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_backup_plan_association + ] = mock_rpc request = {} - client.trigger_backup(request) + client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -11717,7 +11736,7 @@ def test_trigger_backup_use_cached_wrapped_rpc(): # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.trigger_backup(request) + client.update_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11725,7 +11744,7 @@ def test_trigger_backup_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_trigger_backup_async_use_cached_wrapped_rpc( +async def test_update_backup_plan_association_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11742,7 +11761,7 @@ async def test_trigger_backup_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.trigger_backup + client._client._transport.update_backup_plan_association in client._client._transport._wrapped_methods ) @@ -11750,11 +11769,11 @@ async def test_trigger_backup_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.trigger_backup + client._client._transport.update_backup_plan_association ] = mock_rpc request = {} - await client.trigger_backup(request) + await client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -11764,7 +11783,7 @@ async def test_trigger_backup_async_use_cached_wrapped_rpc( # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - await client.trigger_backup(request) + await client.update_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -11772,9 +11791,9 @@ async def test_trigger_backup_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_trigger_backup_async( +async def test_update_backup_plan_association_async( transport: str = "grpc_asyncio", - request_type=backupplanassociation.TriggerBackupRequest, + request_type=backupplanassociation.UpdateBackupPlanAssociationRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -11786,17 +11805,19 @@ async def test_trigger_backup_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) - response = await client.trigger_backup(request) + response = await client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupplanassociation.TriggerBackupRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. 
@@ -11804,25 +11825,27 @@ async def test_trigger_backup_async( @pytest.mark.asyncio -async def test_trigger_backup_async_from_dict(): - await test_trigger_backup_async(request_type=dict) +async def test_update_backup_plan_association_async_from_dict(): + await test_update_backup_plan_association_async(request_type=dict) -def test_trigger_backup_field_headers(): +def test_update_backup_plan_association_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupplanassociation.TriggerBackupRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() - request.name = "name_value" + request.backup_plan_association.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: call.return_value = operations_pb2.Operation(name="operations/op") - client.trigger_backup(request) + client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -11833,28 +11856,30 @@ def test_trigger_backup_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan_association.name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_trigger_backup_field_headers_async(): +async def test_update_backup_plan_association_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = backupplanassociation.TriggerBackupRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() - request.name = "name_value" + request.backup_plan_association.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) - await client.trigger_backup(request) + await client.update_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -11865,39 +11890,43 @@ async def test_trigger_backup_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "backup_plan_association.name=name_value", ) in kw["metadata"] -def test_trigger_backup_flattened(): +def test_update_backup_plan_association_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.trigger_backup( - name="name_value", - rule_id="rule_id_value", + client.update_backup_plan_association( + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") assert arg == mock_val - arg = args[0].rule_id - mock_val = "rule_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val -def test_trigger_backup_flattened_error(): +def test_update_backup_plan_association_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -11905,21 +11934,25 @@ def test_trigger_backup_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.trigger_backup( - backupplanassociation.TriggerBackupRequest(), - name="name_value", - rule_id="rule_id_value", + client.update_backup_plan_association( + backupplanassociation.UpdateBackupPlanAssociationRequest(), + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio -async def test_trigger_backup_flattened_async(): +async def test_update_backup_plan_association_flattened_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -11928,25 +11961,27 @@ async def test_trigger_backup_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.trigger_backup( - name="name_value", - rule_id="rule_id_value", + response = await client.update_backup_plan_association( + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].backup_plan_association + mock_val = backupplanassociation.BackupPlanAssociation(name="name_value") assert arg == mock_val - arg = args[0].rule_id - mock_val = "rule_id_value" + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio -async def test_trigger_backup_flattened_error_async(): +async def test_update_backup_plan_association_flattened_error_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) @@ -11954,21 +11989,23 @@ async def test_trigger_backup_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.trigger_backup( - backupplanassociation.TriggerBackupRequest(), - name="name_value", - rule_id="rule_id_value", + await client.update_backup_plan_association( + backupplanassociation.UpdateBackupPlanAssociationRequest(), + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [ - backupdr.InitializeServiceRequest, + backupplanassociation.GetBackupPlanAssociationRequest, dict, ], ) -def test_initialize_service(request_type, transport: str = "grpc"): +def test_get_backup_plan_association(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11980,23 +12017,40 @@ def test_initialize_service(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.initialize_service), "__call__" + type(client.transport.get_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.initialize_service(request) + call.return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + backup_plan_revision_id="backup_plan_revision_id_value", + backup_plan_revision_name="backup_plan_revision_name_value", + ) + response = client.get_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = backupdr.InitializeServiceRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + assert response.backup_plan_revision_id == "backup_plan_revision_id_value" + assert response.backup_plan_revision_name == "backup_plan_revision_name_value" -def test_initialize_service_non_empty_request_with_auto_populated_field(): +def test_get_backup_plan_association_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( @@ -12007,28 +12061,26 @@ def test_initialize_service_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = backupdr.InitializeServiceRequest( + request = backupplanassociation.GetBackupPlanAssociationRequest( name="name_value", - resource_type="resource_type_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.initialize_service), "__call__" + type(client.transport.get_backup_plan_association), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.initialize_service(request=request) + client.get_backup_plan_association(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == backupdr.InitializeServiceRequest( + assert args[0] == backupplanassociation.GetBackupPlanAssociationRequest( name="name_value", - resource_type="resource_type_value", ) -def test_initialize_service_use_cached_wrapped_rpc(): +def test_get_backup_plan_association_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12043,7 +12095,8 @@ def test_initialize_service_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.initialize_service in client._transport._wrapped_methods + client._transport.get_backup_plan_association + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12052,20 +12105,15 @@ def test_initialize_service_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.initialize_service + client._transport.get_backup_plan_association ] = mock_rpc request = {} - client.initialize_service(request) + client.get_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.initialize_service(request) + client.get_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12073,7 +12121,7 @@ def test_initialize_service_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_initialize_service_async_use_cached_wrapped_rpc( +async def test_get_backup_plan_association_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12090,7 +12138,7 @@ async def test_initialize_service_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.initialize_service + client._client._transport.get_backup_plan_association in client._client._transport._wrapped_methods ) @@ -12098,21 +12146,16 @@ async def test_initialize_service_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.initialize_service + client._client._transport.get_backup_plan_association ] = mock_rpc request = {} - await client.initialize_service(request) + await client.get_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.initialize_service(request) + await client.get_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -12120,8 +12163,9 @@ async def test_initialize_service_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_initialize_service_async( - transport: str = "grpc_asyncio", request_type=backupdr.InitializeServiceRequest +async def test_get_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.GetBackupPlanAssociationRequest, ): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), @@ -12134,46 +12178,63 @@ async def test_initialize_service_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.initialize_service), "__call__" + type(client.transport.get_backup_plan_association), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + backup_plan_revision_id="backup_plan_revision_id_value", + backup_plan_revision_name="backup_plan_revision_name_value", + ) ) - response = await client.initialize_service(request) + response = await client.get_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = backupdr.InitializeServiceRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + assert response.backup_plan_revision_id == "backup_plan_revision_id_value" + assert response.backup_plan_revision_name == "backup_plan_revision_name_value" @pytest.mark.asyncio -async def test_initialize_service_async_from_dict(): - await test_initialize_service_async(request_type=dict) +async def test_get_backup_plan_association_async_from_dict(): + await test_get_backup_plan_association_async(request_type=dict) -def test_initialize_service_field_headers(): +def test_get_backup_plan_association_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupdr.InitializeServiceRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.initialize_service), "__call__" + type(client.transport.get_backup_plan_association), "__call__" ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.initialize_service(request) + call.return_value = backupplanassociation.BackupPlanAssociation() + client.get_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -12189,25 +12250,25 @@ def test_initialize_service_field_headers(): @pytest.mark.asyncio -async def test_initialize_service_field_headers_async(): +async def test_get_backup_plan_association_field_headers_async(): client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = backupdr.InitializeServiceRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.initialize_service), "__call__" + type(client.transport.get_backup_plan_association), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + backupplanassociation.BackupPlanAssociation() ) - await client.initialize_service(request) + await client.get_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -12222,278 +12283,173 @@ async def test_initialize_service_field_headers_async(): ) in kw["metadata"] -def test_list_management_servers_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +def test_get_backup_plan_association_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Ensure method has been cached - assert ( - client._transport.list_management_servers - in client._transport._wrapped_methods + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_backup_plan_association( + name="name_value", ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.list_management_servers - ] = mock_rpc - - request = {} - client.list_management_servers(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.list_management_servers(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_list_management_servers_rest_required_fields( - request_type=backupdr.ListManagementServersRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_management_servers._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +def test_get_backup_plan_association_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.BackupPlanAssociation() - response = client.list_management_servers(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_backup_plan_association( + name="name_value", + ) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_management_servers_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.list_management_servers._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", ) - & set(("parent",)) - ) -def test_list_management_servers_rest_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.ListBackupPlanAssociationsRequest, + dict, + ], +) +def test_list_backup_plan_associations(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_plan_associations(request) - client.list_management_servers(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] -def test_list_management_servers_rest_flattened_error(transport: str = "rest"): +def test_list_backup_plan_associations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_management_servers( - backupdr.ListManagementServersRequest(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.ListBackupPlanAssociationsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_backup_plan_associations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.ListBackupPlanAssociationsRequest( parent="parent_value", + page_token="page_token_value", + filter="filter_value", ) -def test_list_management_servers_rest_pager(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - next_page_token="abc", - ), - backupdr.ListManagementServersResponse( - management_servers=[], - next_page_token="def", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - ], - next_page_token="ghi", - ), - backupdr.ListManagementServersResponse( - management_servers=[ - backupdr.ManagementServer(), - backupdr.ManagementServer(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupdr.ListManagementServersResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_management_servers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupdr.ManagementServer) for i in results) - - pages = list(client.list_management_servers(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_management_server_rest_use_cached_wrapped_rpc(): +def test_list_backup_plan_associations_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with 
mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -12502,7 +12458,7 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_management_server + client._transport.list_backup_plan_associations in client._transport._wrapped_methods ) @@ -12512,173 +12468,31 @@ def test_get_management_server_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_management_server + client._transport.list_backup_plan_associations ] = mock_rpc - request = {} - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_management_server(request) + client.list_backup_plan_associations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_management_server_rest_required_fields( - request_type=backupdr.GetManagementServerRequest, +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - 
- # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupdr.ManagementServer() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.get_management_server(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_get_management_server_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.get_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_management_server_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = backupdr.ManagementServer() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_management_server(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" - % client.transport._host, - args[1], - ) - - -def test_get_management_server_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_management_server( - backupdr.GetManagementServerRequest(), - name="name_value", - ) - - -def test_create_management_server_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) # Should wrap all calls on client creation @@ -12687,407 +12501,527 @@ def test_create_management_server_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_management_server - in client._transport._wrapped_methods + client._client._transport.list_backup_plan_associations + in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.create_management_server + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_backup_plan_associations ] = mock_rpc request = {} - client.create_management_server(request) + await client.list_backup_plan_associations(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_management_server(request) + await client.list_backup_plan_associations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_management_server_rest_required_fields( - request_type=backupdr.CreateManagementServerRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["management_server_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped - assert "managementServerId" not in jsonified_request + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_backup_plan_associations(request) - # verify required fields with default values are now present - assert "managementServerId" in jsonified_request - assert ( - jsonified_request["managementServerId"] == request_init["management_server_id"] - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.ListBackupPlanAssociationsRequest() + assert args[0] == request - jsonified_request["parent"] = "parent_value" - jsonified_request["managementServerId"] = "management_server_id_value" + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBackupPlanAssociationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "management_server_id", - "request_id", - ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "managementServerId" in jsonified_request - assert jsonified_request["managementServerId"] == "management_server_id_value" +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_from_dict(): + await test_list_backup_plan_associations_async(request_type=dict) + +def test_list_backup_plan_associations_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.ListBackupPlanAssociationsRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + client.list_backup_plan_associations(request) - response = client.create_management_server(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "managementServerId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_create_management_server_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_backup_plan_associations_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_management_server._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "managementServerId", - "requestId", - ) - ) - & set( - ( - "parent", - "managementServerId", - "managementServer", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.ListBackupPlanAssociationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() ) - ) + await client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_create_management_server_rest_flattened(): + +def test_list_backup_plan_associations_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_backup_plan_associations( parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_management_server(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/managementServers" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_create_management_server_rest_flattened_error(transport: str = "rest"): +def test_list_backup_plan_associations_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_management_server( - backupdr.CreateManagementServerRequest(), + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), parent="parent_value", - management_server=backupdr.ManagementServer(name="name_value"), - management_server_id="management_server_id_value", ) -def test_delete_management_server_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - # Ensure method has been cached - assert ( - client._transport.delete_management_server - in client._transport._wrapped_methods + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse() ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_backup_plan_associations( + parent="parent_value", ) - client._transport._wrapped_methods[ - client._transport.delete_management_server - ] = mock_rpc - - request = {} - client.delete_management_server(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_management_server(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_delete_management_server_rest_required_fields( - request_type=backupdr.DeleteManagementServerRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_management_server._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_management_server(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_delete_management_server_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_backup_plan_associations_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.delete_management_server._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) -def test_delete_management_server_rest_flattened(): +def test_list_backup_plan_associations_pager(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_backup_plan_associations( + request={}, retry=retry, timeout=timeout + ) - client.delete_management_server(**mock_args) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/managementServers/*}" - % client.transport._host, - args[1], + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results ) -def test_delete_management_server_rest_flattened_error(transport: str = "rest"): +def test_list_backup_plan_associations_pages(transport_name: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_management_server( - backupdr.DeleteManagementServerRequest(), - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, ) + pages = list(client.list_backup_plan_associations(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_create_backup_vault_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_backup_plan_associations( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_backup_plan_associations_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_backup_plan_associations(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + dict, + ], +) +def test_fetch_backup_plan_associations_for_resource_type( + request_type, transport: str = "grpc" +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) + response = client.fetch_backup_plan_associations_for_resource_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchBackupPlanAssociationsForResourceTypePager) + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_backup_plan_associations_for_resource_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.fetch_backup_plan_associations_for_resource_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_fetch_backup_plan_associations_for_resource_type_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13096,7 +13030,8 @@ def test_create_backup_vault_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_backup_vault in client._transport._wrapped_methods + client._transport.fetch_backup_plan_associations_for_resource_type + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13105,477 +13040,572 @@ def test_create_backup_vault_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_backup_vault + client._transport.fetch_backup_plan_associations_for_resource_type ] = mock_rpc - request = {} - client.create_backup_vault(request) + client.fetch_backup_plan_associations_for_resource_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_vault(request) + client.fetch_backup_plan_associations_for_resource_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_vault_rest_required_fields( - request_type=backupvault.CreateBackupVaultRequest, +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.BackupDRRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request_init["backup_vault_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped - assert "backupVaultId" not in jsonified_request + # Ensure method has been cached + assert ( + client._client._transport.fetch_backup_plan_associations_for_resource_type + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = 
mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_backup_plan_associations_for_resource_type + ] = mock_rpc - # verify required fields with default values are now present - assert "backupVaultId" in jsonified_request - assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] + request = {} + await client.fetch_backup_plan_associations_for_resource_type(request) - jsonified_request["parent"] = "parent_value" - jsonified_request["backupVaultId"] = "backup_vault_id_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "backup_vault_id", - "request_id", - "validate_only", - ) - ) - jsonified_request.update(unset_fields) + await client.fetch_backup_plan_associations_for_resource_type(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "backupVaultId" in jsonified_request - assert jsonified_request["backupVaultId"] == "backup_vault_id_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the 
returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_backup_plan_associations_for_resource_type( + request + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) + assert args[0] == request - response = client.create_backup_vault(request) + # Establish that the response is the type that we expect. + assert isinstance( + response, pagers.FetchBackupPlanAssociationsForResourceTypeAsyncPager + ) + assert response.next_page_token == "next_page_token_value" - expected_params = [ - ( - "backupVaultId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_async_from_dict(): + await test_fetch_backup_plan_associations_for_resource_type_async(request_type=dict) -def test_create_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials + +def test_fetch_backup_plan_associations_for_resource_type_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.create_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "backupVaultId", - "requestId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "backupVaultId", - "backupVault", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() ) - ) + client.fetch_backup_plan_associations_for_resource_type(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_create_backup_vault_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - backup_vault=backupvault.BackupVault(name="name_value"), - backup_vault_id="backup_vault_id_value", + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() ) - mock_args.update(sample_request) + await client.fetch_backup_plan_associations_for_resource_type(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.create_backup_vault(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_fetch_backup_plan_associations_for_resource_type_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_backup_plan_associations_for_resource_type( + parent="parent_value", + resource_type="resource_type_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupVaults" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" + assert arg == mock_val -def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_fetch_backup_plan_associations_for_resource_type_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup_vault( - backupvault.CreateBackupVaultRequest(), + client.fetch_backup_plan_associations_for_resource_type( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest(), parent="parent_value", - backup_vault=backupvault.BackupVault(name="name_value"), - backup_vault_id="backup_vault_id_value", + resource_type="resource_type_value", ) -def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Ensure method has been cached - assert ( - client._transport.list_backup_vaults in client._transport._wrapped_methods + # Mock the actual call 
within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_backup_plan_associations_for_resource_type( + parent="parent_value", + resource_type="resource_type_value", ) - client._transport._wrapped_methods[ - client._transport.list_backup_vaults - ] = mock_rpc - request = {} - client.list_backup_vaults(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" + assert arg == mock_val - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - client.list_backup_vaults(request) +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.fetch_backup_plan_associations_for_resource_type( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest(), + parent="parent_value", + resource_type="resource_type_value", + ) -def test_list_backup_vaults_rest_required_fields( - request_type=backupvault.ListBackupVaultsRequest, +def test_fetch_backup_plan_associations_for_resource_type_pager( + transport_name: str = "grpc", ): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_backup_vaults._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_backup_vaults._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "view", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupVaultsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_backup_vaults(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_backup_vaults_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials + transport=transport_name, ) - unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "view", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, ) - & set(("parent",)) - ) - - -def test_list_backup_vaults_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = backupvault.ListBackupVaultsResponse() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.ListBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_backup_vaults(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupVaults" - % client.transport._host, - args[1], + pager = client.fetch_backup_plan_associations_for_resource_type( + request={}, retry=retry, timeout=timeout ) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout -def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_backup_vaults( - backupvault.ListBackupVaultsRequest(), - parent="parent_value", + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results ) -def test_list_backup_vaults_rest_pager(transport: str = "rest"): +def test_fetch_backup_plan_associations_for_resource_type_pages( + transport_name: str = "grpc", +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), ], next_page_token="abc", ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[], + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[], next_page_token="def", ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), ], next_page_token="ghi", ), - backupvault.ListBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupvault.ListBackupVaultsResponse.to_json(x) for x in response + pages = list( + client.fetch_backup_plan_associations_for_resource_type(request={}).pages ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_backup_vaults(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.BackupVault) for i in results) - - pages = list(client.list_backup_vaults(request=sample_request).pages) for page_, token in 
zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_backup_plan_associations_for_resource_type( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( 
+ isinstance(i, backupplanassociation.BackupPlanAssociation) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_backup_plan_associations_for_resource_type(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.DeleteBackupPlanAssociationRequest, + dict, + 
], +) +def test_delete_backup_plan_association(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_backup_plan_association_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_backup_plan_association(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.DeleteBackupPlanAssociationRequest( + name="name_value", + ) + + +def test_delete_backup_plan_association_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13584,7 +13614,7 @@ def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.fetch_usable_backup_vaults + client._transport.delete_backup_plan_association in client._transport._wrapped_methods ) @@ -13594,253 +13624,333 @@ def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.fetch_usable_backup_vaults + client._transport.delete_backup_plan_association ] = mock_rpc - request = {} - client.fetch_usable_backup_vaults(request) + client.delete_backup_plan_association(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.fetch_usable_backup_vaults(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan_association(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_fetch_usable_backup_vaults_rest_required_fields( - request_type=backupvault.FetchUsableBackupVaultsRequest, +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.BackupDRRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.delete_backup_plan_association + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_backup_plan_association + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.delete_backup_plan_association(request) - 
jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + await client.delete_backup_plan_association(request) - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Designate an appropriate value for the returned response. - return_value = backupvault.FetchUsableBackupVaultsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # Convert return value to protobuf type - return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_backup_plan_association(request) - response = client.fetch_usable_backup_vaults(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) -def test_fetch_usable_backup_vaults_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_backup_plan_association_async_from_dict(): + await test_delete_backup_plan_association_async(request_type=dict) + + +def test_delete_backup_plan_association_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.fetch_usable_backup_vaults._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_backup_plan_association_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = backupplanassociation.DeleteBackupPlanAssociationRequest() + + request.name = "name_value" -def test_fetch_usable_backup_vaults_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_backup_plan_association_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.FetchUsableBackupVaultsResponse() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_backup_plan_association( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + +def test_delete_backup_plan_association_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.fetch_usable_backup_vaults(**mock_args) +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_backup_plan_association( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_fetch_usable_backup_vaults_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_delete_backup_plan_association_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.fetch_usable_backup_vaults( - backupvault.FetchUsableBackupVaultsRequest(), - parent="parent_value", + await client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", ) -def test_fetch_usable_backup_vaults_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.TriggerBackupRequest, + dict, + ], +) +def test_trigger_backup(request_type, transport: str = "grpc"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - next_page_token="abc", - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[], - next_page_token="def", - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - ], - next_page_token="ghi", - ), - backupvault.FetchUsableBackupVaultsResponse( - backup_vaults=[ - backupvault.BackupVault(), - backupvault.BackupVault(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple( - backupvault.FetchUsableBackupVaultsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.trigger_backup(request) - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request - pager = client.fetch_usable_backup_vaults(request=sample_request) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.BackupVault) for i in results) - pages = list(client.fetch_usable_backup_vaults(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_trigger_backup_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.trigger_backup(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupplanassociation.TriggerBackupRequest( + name="name_value", + rule_id="rule_id_value", + ) -def test_get_backup_vault_rest_use_cached_wrapped_rpc(): +def test_trigger_backup_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13848,183 +13958,352 @@ def test_get_backup_vault_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_backup_vault in client._transport._wrapped_methods + assert client._transport.trigger_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_backup_vault - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc request = {} - client.get_backup_vault(request) + client.trigger_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup_vault(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.trigger_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_vault_rest_required_fields( - request_type=backupvault.GetBackupVaultRequest, +@pytest.mark.asyncio +async def test_trigger_backup_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.trigger_backup + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.trigger_backup + ] = mock_rpc - jsonified_request["name"] = "name_value" + request = {} + await client.trigger_backup(request) - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).get_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view",)) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.trigger_backup(request) - # Designate an appropriate value for the returned response. - return_value = backupvault.BackupVault() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.BackupVault.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_trigger_backup_async( + transport: str = "grpc_asyncio", + request_type=backupplanassociation.TriggerBackupRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.get_backup_vault(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.trigger_backup(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupplanassociation.TriggerBackupRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_get_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.get_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) +@pytest.mark.asyncio +async def test_trigger_backup_async_from_dict(): + await test_trigger_backup_async(request_type=dict) -def test_get_backup_vault_rest_flattened(): +def test_trigger_backup_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.BackupVault() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" - } + request.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.trigger_backup(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.BackupVault.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.get_backup_vault(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_trigger_backup_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupplanassociation.TriggerBackupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_trigger_backup_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val -def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_trigger_backup_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_backup_vault( - backupvault.GetBackupVaultRequest(), + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), name="name_value", + rule_id="rule_id_value", ) -def test_update_backup_vault_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_trigger_backup_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.trigger_backup( + name="name_value", + rule_id="rule_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].rule_id + mock_val = "rule_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_trigger_backup_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datasourcereference.GetDataSourceReferenceRequest, + dict, + ], +) +def test_get_data_source_reference(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasourcereference.DataSourceReference( + name="name_value", + data_source="data_source_value", + data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, + data_source_backup_count=2535, + ) + response = client.get_data_source_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasourcereference.GetDataSourceReferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasourcereference.DataSourceReference) + assert response.name == "name_value" + assert response.data_source == "data_source_value" + assert ( + response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE + ) + assert response.data_source_backup_count == 2535 + + +def test_get_data_source_reference_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasourcereference.GetDataSourceReferenceRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_source_reference(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datasourcereference.GetDataSourceReferenceRequest( + name="name_value", + ) + + +def test_get_data_source_reference_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14033,7 +14312,8 @@ def test_update_backup_vault_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_backup_vault in client._transport._wrapped_methods + client._transport.get_data_source_reference + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -14042,402 +14322,355 @@ def test_update_backup_vault_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.update_backup_vault + client._transport.get_data_source_reference ] = mock_rpc - request = {} - client.update_backup_vault(request) + client.get_data_source_reference(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_backup_vault(request) + client.get_data_source_reference(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_backup_vault_rest_required_fields( - request_type=backupvault.UpdateBackupVaultRequest, +@pytest.mark.asyncio +async def test_get_data_source_reference_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.BackupDRRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_data_source_reference + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # 
Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_source_reference + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.get_data_source_reference(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "force", - "request_id", - "update_mask", - "validate_only", + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_source_reference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_source_reference_async( + transport: str = "grpc_asyncio", + request_type=datasourcereference.GetDataSourceReferenceRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.DataSourceReference( + name="name_value", + data_source="data_source_value", + data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, + data_source_backup_count=2535, + ) ) + response = await client.get_data_source_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasourcereference.GetDataSourceReferenceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datasourcereference.DataSourceReference) + assert response.name == "name_value" + assert response.data_source == "data_source_value" + assert ( + response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE ) - jsonified_request.update(unset_fields) + assert response.data_source_backup_count == 2535 - # verify required fields with non-default values are left alone +@pytest.mark.asyncio +async def test_get_data_source_reference_async_from_dict(): + await test_get_data_source_reference_async(request_type=dict) + + +def test_get_data_source_reference_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasourcereference.GetDataSourceReferenceRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + call.return_value = datasourcereference.DataSourceReference() + client.get_data_source_reference(request) - response = client.update_backup_vault(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_update_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_data_source_reference_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.update_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "force", - "requestId", - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "updateMask", - "backupVault", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasourcereference.GetDataSourceReferenceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.DataSourceReference() ) - ) + await client.get_data_source_reference(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_update_backup_vault_rest_flattened(): + +def test_get_data_source_reference_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "backup_vault": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - backup_vault=backupvault.BackupVault(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datasourcereference.DataSourceReference() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_source_reference( + name="name_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_get_data_source_reference_flattened_error(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_backup_vault( - backupvault.UpdateBackupVaultRequest(), - backup_vault=backupvault.BackupVault(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_data_source_reference( + datasourcereference.GetDataSourceReferenceRequest(), + name="name_value", ) -def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_get_data_source_reference_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = datasourcereference.DataSourceReference() - # Ensure method has been cached - assert ( - client._transport.delete_backup_vault in client._transport._wrapped_methods + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.DataSourceReference() ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_source_reference( + name="name_value", ) - client._transport._wrapped_methods[ - client._transport.delete_backup_vault - ] = mock_rpc - - request = {} - client.delete_backup_vault(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - client.delete_backup_vault(request) +@pytest.mark.asyncio +async def test_get_data_source_reference_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_data_source_reference( + datasourcereference.GetDataSourceReferenceRequest(), + name="name_value", + ) -def test_delete_backup_vault_rest_required_fields( - request_type=backupvault.DeleteBackupVaultRequest, -): - transport_class = transports.BackupDRRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_vault._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_vault._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "allow_missing", - "etag", - "force", - "ignore_backup_plan_references", - "request_id", - "validate_only", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - +@pytest.mark.parametrize( + "request_type", + [ + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, + dict, + ], +) +def test_fetch_data_source_references_for_resource_type( + request_type, transport: str = "grpc" +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.delete_backup_vault(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - -def test_delete_backup_vault_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "allowMissing", - "etag", - "force", - "ignoreBackupPlanReferences", - "requestId", - "validateOnly", + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + next_page_token="next_page_token_value", ) ) - & set(("name",)) - ) - - -def test_delete_backup_vault_rest_flattened(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.fetch_data_source_references_for_resource_type(request) - client.delete_backup_vault(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*}" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchDataSourceReferencesForResourceTypePager) + assert response.next_page_token == "next_page_token_value" -def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): +def test_fetch_data_source_references_for_resource_type_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_backup_vault( - backupvault.DeleteBackupVaultRequest(), - name="name_value", + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datasourcereference.FetchDataSourceReferencesForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.fetch_data_source_references_for_resource_type(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[ + 0 + ] == datasourcereference.FetchDataSourceReferencesForResourceTypeRequest( + parent="parent_value", + resource_type="resource_type_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", ) -def test_list_data_sources_rest_use_cached_wrapped_rpc(): +def test_fetch_data_source_references_for_resource_type_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14445,7 +14678,10 @@ def test_list_data_sources_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_data_sources in client._transport._wrapped_methods + assert ( + client._transport.fetch_data_source_references_for_resource_type + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -14453,257 +14689,569 @@ def test_list_data_sources_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_data_sources + client._transport.fetch_data_source_references_for_resource_type ] = mock_rpc - request = {} - client.list_data_sources(request) + client.fetch_data_source_references_for_resource_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_data_sources(request) + client.fetch_data_source_references_for_resource_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_data_sources_rest_required_fields( - request_type=backupvault.ListDataSourcesRequest, +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.BackupDRRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.fetch_data_source_references_for_resource_type + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_data_sources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.fetch_data_source_references_for_resource_type + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await 
client.fetch_data_source_references_for_resource_type(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_data_sources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + await client.fetch_data_source_references_for_resource_type(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_async( + transport: str = "grpc_asyncio", + request_type=datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = backupvault.ListDataSourcesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_data_source_references_for_resource_type(request) - # Convert return value to protobuf type - return_value = backupvault.ListDataSourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance( + response, pagers.FetchDataSourceReferencesForResourceTypeAsyncPager + ) + assert response.next_page_token == "next_page_token_value" - response = client.list_data_sources(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_async_from_dict(): + await test_fetch_data_source_references_for_resource_type_async(request_type=dict) -def test_list_data_sources_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_fetch_data_source_references_for_resource_type_field_headers(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_data_sources._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + call.return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() ) - & set(("parent",)) + client.fetch_data_source_references_for_resource_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() -def test_list_data_sources_rest_flattened(): + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + await client.fetch_data_source_references_for_resource_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_fetch_data_source_references_for_resource_type_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.ListDataSourcesResponse() + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.fetch_data_source_references_for_resource_type( + parent="parent_value", + resource_type="resource_type_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3" - } + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_fetch_data_source_references_for_resource_type_flattened_error(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_data_source_references_for_resource_type( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest(), parent="parent_value", + resource_type="resource_type_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.ListDataSourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_sources(**mock_args) +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_flattened_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.fetch_data_source_references_for_resource_type( + parent="parent_value", + resource_type="resource_type_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].resource_type + mock_val = "resource_type_value" + assert arg == mock_val -def test_list_data_sources_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_flattened_error_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_data_sources( - backupvault.ListDataSourcesRequest(), + await client.fetch_data_source_references_for_resource_type( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest(), parent="parent_value", + resource_type="resource_type_value", ) -def test_list_data_sources_rest_pager(transport: str = "rest"): +def test_fetch_data_source_references_for_resource_type_pager( + transport_name: str = "grpc", +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), - backupvault.DataSource(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), ], next_page_token="abc", ), - backupvault.ListDataSourcesResponse( - data_sources=[], + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[], next_page_token="def", ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), ], next_page_token="ghi", ), - backupvault.ListDataSourcesResponse( - data_sources=[ - backupvault.DataSource(), - backupvault.DataSource(), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple( - backupvault.ListDataSourcesResponse.to_json(x) for x in response + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = 
client.fetch_data_source_references_for_resource_type( + request={}, retry=retry, timeout=timeout ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3" - } - pager = client.list_data_sources(request=sample_request) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 - assert all(isinstance(i, backupvault.DataSource) for i in results) + assert all( + isinstance(i, datasourcereference.DataSourceReference) for i in results + ) - pages = list(client.list_data_sources(request=sample_request).pages) + +def test_fetch_data_source_references_for_resource_type_pages( + transport_name: str = "grpc", +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + RuntimeError, + ) + pages = list( + client.fetch_data_source_references_for_resource_type(request={}).pages + ) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_data_source_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_async_pager(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_data_source_references_for_resource_type( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, datasourcereference.DataSourceReference) for i in responses + ) + + +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_async_pages(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.fetch_data_source_references_for_resource_type(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.InitializeServiceRequest, + dict, + ], +) +def test_initialize_service(request_type, transport: str = "grpc"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = backupdr.InitializeServiceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_initialize_service_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = backupdr.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.initialize_service(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == backupdr.InitializeServiceRequest( + name="name_value", + resource_type="resource_type_value", + ) + + +def test_initialize_service_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14711,173 +15259,187 @@ def test_get_data_source_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_data_source in client._transport._wrapped_methods + assert ( + client._transport.initialize_service in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.initialize_service + ] = mock_rpc request = {} - client.get_data_source(request) + client.initialize_service(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_data_source(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_service(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_data_source_rest_required_fields( - request_type=backupvault.GetDataSourceRequest, +@pytest.mark.asyncio +async def test_initialize_service_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.BackupDRRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.initialize_service + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_data_source._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.initialize_service + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.initialize_service(request) - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).get_data_source._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.initialize_service(request) - # Designate an appropriate value for the returned response. - return_value = backupvault.DataSource() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_initialize_service_async( + transport: str = "grpc_asyncio", request_type=backupdr.InitializeServiceRequest +): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.get_data_source(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.initialize_service(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = backupdr.InitializeServiceRequest() + assert args[0] == request + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) -def test_get_data_source_rest_unset_required_fields(): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.get_data_source._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) +@pytest.mark.asyncio +async def test_initialize_service_async_from_dict(): + await test_initialize_service_async(request_type=dict) -def test_get_data_source_rest_flattened(): +def test_initialize_service_field_headers(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = backupvault.DataSource() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" - } + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupdr.InitializeServiceRequest() - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + request.name = "name_value" - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.initialize_service(request) - client.get_data_source(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" - % client.transport._host, - args[1], - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_get_data_source_rest_flattened_error(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_initialize_service_field_headers_async(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_data_source( - backupvault.GetDataSourceRequest(), - name="name_value", + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = backupdr.InitializeServiceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_update_data_source_rest_use_cached_wrapped_rpc(): + +def test_list_management_servers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14892,7 +15454,8 @@ def test_update_data_source_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_data_source in client._transport._wrapped_methods + client._transport.list_management_servers + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -14901,32 +15464,29 @@ def test_update_data_source_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_data_source + client._transport.list_management_servers ] = mock_rpc request = {} - client.update_data_source(request) + client.list_management_servers(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_data_source(request) + client.list_management_servers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_data_source_rest_required_fields( - request_type=backupvault.UpdateDataSourceRequest, +def test_list_management_servers_rest_required_fields( + request_type=backupdr.ListManagementServersRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14937,25 +15497,30 @@ def test_update_data_source_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_source._get_unset_required_fields(jsonified_request) + ).list_management_servers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_data_source._get_unset_required_fields(jsonified_request) + ).list_management_servers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "request_id", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14964,7 +15529,7 @@ def test_update_data_source_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupdr.ListManagementServersResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14976,51 +15541,49 @@ def test_update_data_source_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_source(request) + response = client.list_management_servers(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_data_source_rest_unset_required_fields(): +def test_list_management_servers_rest_unset_required_fields(): transport 
= transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_data_source._get_unset_required_fields({}) + unset_fields = transport.list_management_servers._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "dataSource", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_update_data_source_rest_flattened(): +def test_list_management_servers_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15029,44 +15592,41 @@ def test_update_data_source_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupdr.ListManagementServersResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "data_source": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - data_source=backupvault.DataSource(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} - client.update_data_source(**mock_args) + client.list_management_servers(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" + "%s/v1/{parent=projects/*/locations/*}/managementServers" % client.transport._host, args[1], ) -def test_update_data_source_rest_flattened_error(transport: str = "rest"): +def test_list_management_servers_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15075,14 +15635,76 @@ def test_update_data_source_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_data_source( - backupvault.UpdateDataSourceRequest(), - data_source=backupvault.DataSource(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_management_servers( + backupdr.ListManagementServersRequest(), + parent="parent_value", ) -def test_list_backups_rest_use_cached_wrapped_rpc(): +def test_list_management_servers_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + next_page_token="abc", + ), + backupdr.ListManagementServersResponse( + management_servers=[], + next_page_token="def", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + ], + next_page_token="ghi", + ), + backupdr.ListManagementServersResponse( + management_servers=[ + backupdr.ManagementServer(), + backupdr.ManagementServer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupdr.ListManagementServersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_management_servers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupdr.ManagementServer) for i in results) + + pages = list(client.list_management_servers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_management_server_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15096,33 +15718,40 @@ def test_list_backups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert 
client._transport.list_backups in client._transport._wrapped_methods + assert ( + client._transport.get_management_server + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_management_server + ] = mock_rpc request = {} - client.list_backups(request) + client.get_management_server(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_backups(request) + client.get_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): +def test_get_management_server_rest_required_fields( + request_type=backupdr.GetManagementServerRequest, +): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15133,31 +15762,21 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) + ).get_management_server._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backups._get_unset_required_fields(jsonified_request) - # Check that 
path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - "view", - ) - ) + ).get_management_server._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15166,7 +15785,7 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse() + return_value = backupdr.ManagementServer() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15187,41 +15806,30 @@ def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsR response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) + return_value = backupdr.ManagementServer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) + response = client.get_management_server(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backups_rest_unset_required_fields(): +def test_get_management_server_rest_unset_required_fields(): transport = 
transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backups._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - "view", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_backups_rest_flattened(): +def test_get_management_server_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15230,16 +15838,16 @@ def test_list_backups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse() + return_value = backupdr.ManagementServer() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + "name": "projects/sample1/locations/sample2/managementServers/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -15247,26 +15855,26 @@ def test_list_backups_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) + return_value = backupdr.ManagementServer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(**mock_args) + client.get_management_server(**mock_args) # Establish that the underlying 
call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" + "%s/v1/{name=projects/*/locations/*/managementServers/*}" % client.transport._host, args[1], ) -def test_list_backups_rest_flattened_error(transport: str = "rest"): +def test_get_management_server_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15275,76 +15883,13 @@ def test_list_backups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backups( - backupvault.ListBackupsRequest(), - parent="parent_value", - ) - - -def test_list_backups_rest_pager(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - backupvault.Backup(), - ], - next_page_token="abc", - ), - backupvault.ListBackupsResponse( - backups=[], - next_page_token="def", - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - ], - next_page_token="ghi", - ), - backupvault.ListBackupsResponse( - backups=[ - backupvault.Backup(), - backupvault.Backup(), - ], - ), + client.get_management_server( + backupdr.GetManagementServerRequest(), + name="name_value", ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" - } - - pager = client.list_backups(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupvault.Backup) for i in results) - - pages = list(client.list_backups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_get_backup_rest_use_cached_wrapped_rpc(): +def test_create_management_server_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15358,60 +15903,85 @@ def test_get_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method 
has been cached - assert client._transport.get_backup in client._transport._wrapped_methods + assert ( + client._transport.create_management_server + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_management_server + ] = mock_rpc request = {} - client.get_backup(request) + client.create_management_server(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupRequest): +def test_create_management_server_rest_required_fields( + request_type=backupdr.CreateManagementServerRequest, +): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" - request = request_type(**request_init) + request_init["parent"] = "" + request_init["management_server_id"] = "" + request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped + assert "managementServerId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).create_management_server._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "managementServerId" in jsonified_request + assert ( + jsonified_request["managementServerId"] == request_init["management_server_id"] + ) - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["managementServerId"] = "management_server_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup._get_unset_required_fields(jsonified_request) + ).create_management_server._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("view",)) + assert not set(unset_fields) - set( + ( + "management_server_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "managementServerId" in jsonified_request + assert jsonified_request["managementServerId"] == "management_server_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15420,7 +15990,7 @@ def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupvault.Backup() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15432,39 +16002,57 @@ def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) + response = client.create_management_server(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "managementServerId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_rest_unset_required_fields(): +def test_create_management_server_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("view",)) & set(("name",))) + unset_fields = transport.create_management_server._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "managementServerId", + "requestId", + ) + ) + & set( + ( + "parent", + "managementServerId", + "managementServer", + ) + ) + ) -def test_get_backup_rest_flattened(): +def test_create_management_server_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15473,43 +16061,41 @@ 
def test_get_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.Backup() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupvault.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(**mock_args) + client.create_management_server(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + "%s/v1/{parent=projects/*/locations/*}/managementServers" % client.transport._host, args[1], ) -def test_get_backup_rest_flattened_error(transport: str = "rest"): +def test_create_management_server_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15518,13 +16104,15 @@ def test_get_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_backup( - backupvault.GetBackupRequest(), - name="name_value", + client.create_management_server( + backupdr.CreateManagementServerRequest(), + parent="parent_value", + management_server=backupdr.ManagementServer(name="name_value"), + management_server_id="management_server_id_value", ) -def test_update_backup_rest_use_cached_wrapped_rpc(): +def test_delete_management_server_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15538,17 +16126,22 @@ def test_update_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_backup in client._transport._wrapped_methods + assert ( + client._transport.delete_management_server + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_management_server + ] = mock_rpc request = {} - client.update_backup(request) + client.delete_management_server(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -15557,19 +16150,20 @@ def test_update_backup_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_backup(request) + client.delete_management_server(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_backup_rest_required_fields( - request_type=backupvault.UpdateBackupRequest, +def test_delete_management_server_rest_required_fields( + request_type=backupdr.DeleteManagementServerRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15580,24 +16174,23 @@ def test_update_backup_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) + ).delete_management_server._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_backup._get_unset_required_fields(jsonified_request) + ).delete_management_server._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) + assert not set(unset_fields) - set(("request_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15618,10 +16211,9 @@ def test_update_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -15632,36 +16224,23 @@ def test_update_backup_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup(request) + response = client.delete_management_server(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_backup_rest_unset_required_fields(): +def test_delete_management_server_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_backup._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set( - ( - "updateMask", - "backup", - ) - ) - ) + unset_fields = transport.delete_management_server._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) -def test_update_backup_rest_flattened(): +def test_delete_management_server_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15674,15 +16253,12 @@ def test_update_backup_rest_flattened(): # get 
arguments that satisfy an http rule for this method sample_request = { - "backup": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + "name": "projects/sample1/locations/sample2/managementServers/sample3" } # get truthy value for each flattened field mock_args = dict( - backup=backupvault.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + name="name_value", ) mock_args.update(sample_request) @@ -15694,20 +16270,20 @@ def test_update_backup_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup(**mock_args) + client.delete_management_server(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + "%s/v1/{name=projects/*/locations/*/managementServers/*}" % client.transport._host, args[1], ) -def test_update_backup_rest_flattened_error(transport: str = "rest"): +def test_delete_management_server_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15716,14 +16292,13 @@ def test_update_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_backup( - backupvault.UpdateBackupRequest(), - backup=backupvault.Backup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_management_server( + backupdr.DeleteManagementServerRequest(), + name="name_value", ) -def test_delete_backup_rest_use_cached_wrapped_rpc(): +def test_create_backup_vault_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15737,17 +16312,21 @@ def test_delete_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_backup in client._transport._wrapped_methods + assert ( + client._transport.create_backup_vault in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.create_backup_vault + ] = mock_rpc request = {} - client.delete_backup(request) + client.create_backup_vault(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -15756,20 +16335,21 @@ def test_delete_backup_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup(request) + client.create_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_rest_required_fields( - request_type=backupvault.DeleteBackupRequest, +def test_create_backup_vault_rest_required_fields( + request_type=backupvault.CreateBackupVaultRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" + request_init["backup_vault_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15777,26 +16357,38 @@ def test_delete_backup_rest_required_fields( ) # verify fields with default values are dropped + assert "backupVaultId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).create_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == request_init["backup_vault_id"] - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["backupVaultId"] = "backup_vault_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup._get_unset_required_fields(jsonified_request) + ).create_backup_vault._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "backup_vault_id", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupVaultId" in jsonified_request + assert jsonified_request["backupVaultId"] == "backup_vault_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15817,9 +16409,10 @@ def test_delete_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -15830,23 +16423,44 @@ def test_delete_backup_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) + response = client.create_backup_vault(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "backupVaultId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_rest_unset_required_fields(): +def test_create_backup_vault_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.create_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupVaultId", + "requestId", + 
"validateOnly", + ) + ) + & set( + ( + "parent", + "backupVaultId", + "backupVault", + ) + ) + ) -def test_delete_backup_rest_flattened(): +def test_create_backup_vault_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15858,13 +16472,13 @@ def test_delete_backup_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", ) mock_args.update(sample_request) @@ -15876,20 +16490,20 @@ def test_delete_backup_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(**mock_args) + client.create_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + "%s/v1/{parent=projects/*/locations/*}/backupVaults" % client.transport._host, args[1], ) -def test_delete_backup_rest_flattened_error(transport: str = "rest"): +def test_create_backup_vault_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15898,13 +16512,15 @@ def test_delete_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_backup( - backupvault.DeleteBackupRequest(), - name="name_value", + client.create_backup_vault( + backupvault.CreateBackupVaultRequest(), + parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + backup_vault_id="backup_vault_id_value", ) -def test_restore_backup_rest_use_cached_wrapped_rpc(): +def test_list_backup_vaults_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15918,39 +16534,39 @@ def test_restore_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.restore_backup in client._transport._wrapped_methods + assert ( + client._transport.list_backup_vaults in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_backup_vaults + ] = mock_rpc request = {} - client.restore_backup(request) + client.list_backup_vaults(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.restore_backup(request) + client.list_backup_vaults(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_restore_backup_rest_required_fields( - request_type=backupvault.RestoreBackupRequest, +def test_list_backup_vaults_rest_required_fields( + request_type=backupvault.ListBackupVaultsRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15961,21 +16577,31 @@ def test_restore_backup_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_backup._get_unset_required_fields(jsonified_request) + ).list_backup_vaults._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).restore_backup._get_unset_required_fields(jsonified_request) + ).list_backup_vaults._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15984,7 +16610,7 @@ def test_restore_backup_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListBackupVaultsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15996,37 +16622,50 @@ def test_restore_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restore_backup(request) + response = client.list_backup_vaults(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_restore_backup_rest_unset_required_fields(): +def 
test_list_backup_vaults_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.restore_backup._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.list_backup_vaults._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) -def test_restore_backup_rest_flattened(): +def test_list_backup_vaults_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16035,41 +16674,41 @@ def test_restore_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListBackupVaultsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.restore_backup(**mock_args) + client.list_backup_vaults(**mock_args) # 
Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore" + "%s/v1/{parent=projects/*/locations/*}/backupVaults" % client.transport._host, args[1], ) -def test_restore_backup_rest_flattened_error(transport: str = "rest"): +def test_list_backup_vaults_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16078,13 +16717,76 @@ def test_restore_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.restore_backup( - backupvault.RestoreBackupRequest(), - name="name_value", + client.list_backup_vaults( + backupvault.ListBackupVaultsRequest(), + parent="parent_value", ) -def test_create_backup_plan_rest_use_cached_wrapped_rpc(): +def test_list_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.list_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_fetch_usable_backup_vaults_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16099,7 +16801,8 @@ def test_create_backup_plan_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_backup_plan in client._transport._wrapped_methods + 
client._transport.fetch_usable_backup_vaults + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16108,34 +16811,29 @@ def test_create_backup_plan_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_backup_plan + client._transport.fetch_usable_backup_vaults ] = mock_rpc request = {} - client.create_backup_plan(request) + client.fetch_usable_backup_vaults(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_plan(request) + client.fetch_usable_backup_vaults(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_plan_rest_required_fields( - request_type=backupplan.CreateBackupPlanRequest, +def test_fetch_usable_backup_vaults_rest_required_fields( + request_type=backupvault.FetchUsableBackupVaultsRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} request_init["parent"] = "" - request_init["backup_plan_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16143,28 +16841,26 @@ def test_create_backup_plan_rest_required_fields( ) # verify fields with default values are dropped - assert "backupPlanId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_plan._get_unset_required_fields(jsonified_request) + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "backupPlanId" in jsonified_request - assert 
jsonified_request["backupPlanId"] == request_init["backup_plan_id"] jsonified_request["parent"] = "parent_value" - jsonified_request["backupPlanId"] = "backup_plan_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_plan._get_unset_required_fields(jsonified_request) + ).fetch_usable_backup_vaults._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "backup_plan_id", - "request_id", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -16172,8 +16868,6 @@ def test_create_backup_plan_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "backupPlanId" in jsonified_request - assert jsonified_request["backupPlanId"] == "backup_plan_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16182,7 +16876,7 @@ def test_create_backup_plan_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.FetchUsableBackupVaultsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16194,57 +16888,49 @@ def test_create_backup_plan_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_plan(request) + response = client.fetch_usable_backup_vaults(request) - expected_params = [ - ( - "backupPlanId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_backup_plan_rest_unset_required_fields(): +def test_fetch_usable_backup_vaults_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup_plan._get_unset_required_fields({}) + unset_fields = transport.fetch_usable_backup_vaults._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "backupPlanId", - "requestId", - ) - ) - & set( - ( - "parent", - "backupPlanId", - "backupPlan", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_create_backup_plan_rest_flattened(): +def test_fetch_usable_backup_vaults_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16253,7 
+16939,7 @@ def test_create_backup_plan_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.FetchUsableBackupVaultsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -16261,33 +16947,33 @@ def test_create_backup_plan_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup_plan=backupplan.BackupPlan(name="name_value"), - backup_plan_id="backup_plan_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_plan(**mock_args) + client.fetch_usable_backup_vaults(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupPlans" + "%s/v1/{parent=projects/*/locations/*}/backupVaults:fetchUsable" % client.transport._host, args[1], ) -def test_create_backup_plan_rest_flattened_error(transport: str = "rest"): +def test_fetch_usable_backup_vaults_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16296,15 +16982,76 @@ def test_create_backup_plan_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup_plan( - backupplan.CreateBackupPlanRequest(), + client.fetch_usable_backup_vaults( + backupvault.FetchUsableBackupVaultsRequest(), parent="parent_value", - backup_plan=backupplan.BackupPlan(name="name_value"), - backup_plan_id="backup_plan_id_value", ) -def test_get_backup_plan_rest_use_cached_wrapped_rpc(): +def test_fetch_usable_backup_vaults_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + next_page_token="abc", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[], + next_page_token="def", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + ], + next_page_token="ghi", + ), + backupvault.FetchUsableBackupVaultsResponse( + backup_vaults=[ + backupvault.BackupVault(), + backupvault.BackupVault(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupvault.FetchUsableBackupVaultsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.fetch_usable_backup_vaults(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.BackupVault) for i in results) + + pages = list(client.fetch_usable_backup_vaults(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_backup_vault_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16318,30 +17065,32 @@ def test_get_backup_plan_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert 
client._transport.get_backup_plan in client._transport._wrapped_methods + assert client._transport.get_backup_vault in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_backup_vault + ] = mock_rpc request = {} - client.get_backup_plan(request) + client.get_backup_vault(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_backup_plan(request) + client.get_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_plan_rest_required_fields( - request_type=backupplan.GetBackupPlanRequest, +def test_get_backup_vault_rest_required_fields( + request_type=backupvault.GetBackupVaultRequest, ): transport_class = transports.BackupDRRestTransport @@ -16357,7 +17106,7 @@ def test_get_backup_plan_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_plan._get_unset_required_fields(jsonified_request) + ).get_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16366,7 +17115,9 @@ def test_get_backup_plan_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_plan._get_unset_required_fields(jsonified_request) + ).get_backup_vault._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("view",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16380,7 +17131,7 @@ def test_get_backup_plan_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupplan.BackupPlan() + return_value = backupvault.BackupVault() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16401,30 +17152,30 @@ def test_get_backup_plan_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupplan.BackupPlan.pb(return_value) + return_value = backupvault.BackupVault.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_plan(request) + response = client.get_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_plan_rest_unset_required_fields(): +def test_get_backup_vault_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup_plan._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.get_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) -def test_get_backup_plan_rest_flattened(): +def test_get_backup_vault_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16433,11 +17184,11 @@ def 
test_get_backup_plan_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupplan.BackupPlan() + return_value = backupvault.BackupVault() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupPlans/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3" } # get truthy value for each flattened field @@ -16450,26 +17201,26 @@ def test_get_backup_plan_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupplan.BackupPlan.pb(return_value) + return_value = backupvault.BackupVault.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_plan(**mock_args) + client.get_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" % client.transport._host, args[1], ) -def test_get_backup_plan_rest_flattened_error(transport: str = "rest"): +def test_get_backup_vault_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16478,13 +17229,13 @@ def test_get_backup_plan_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_backup_plan( - backupplan.GetBackupPlanRequest(), + client.get_backup_vault( + backupvault.GetBackupVaultRequest(), name="name_value", ) -def test_list_backup_plans_rest_use_cached_wrapped_rpc(): +def test_update_backup_vault_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16498,7 +17249,9 @@ def test_list_backup_plans_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_backup_plans in client._transport._wrapped_methods + assert ( + client._transport.update_backup_vault in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16506,29 +17259,32 @@ def test_list_backup_plans_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_plans + client._transport.update_backup_vault ] = mock_rpc request = {} - client.list_backup_plans(request) + client.update_backup_vault(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_plans(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backup_plans_rest_required_fields( - request_type=backupplan.ListBackupPlansRequest, +def test_update_backup_vault_rest_required_fields( + request_type=backupvault.UpdateBackupVaultRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16539,30 +17295,27 @@ def test_list_backup_plans_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_plans._get_unset_required_fields(jsonified_request) + ).update_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_plans._get_unset_required_fields(jsonified_request) + ).update_backup_vault._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "force", + "force_update_access_restriction", + "request_id", + "update_mask", + "validate_only", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16571,7 +17324,7 @@ def test_list_backup_plans_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupplan.ListBackupPlansResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16583,49 +17336,53 @@ def test_list_backup_plans_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplan.ListBackupPlansResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_plans(request) + response = client.update_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backup_plans_rest_unset_required_fields(): +def 
test_update_backup_vault_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backup_plans._get_unset_required_fields({}) + unset_fields = transport.update_backup_vault._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "force", + "forceUpdateAccessRestriction", + "requestId", + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "updateMask", + "backupVault", ) ) - & set(("parent",)) ) -def test_list_backup_plans_rest_flattened(): +def test_update_backup_vault_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16634,41 +17391,44 @@ def test_list_backup_plans_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupplan.ListBackupPlansResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupplan.ListBackupPlansResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_plans(**mock_args) + client.update_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupPlans" + "%s/v1/{backup_vault.name=projects/*/locations/*/backupVaults/*}" % client.transport._host, args[1], ) -def test_list_backup_plans_rest_flattened_error(transport: str = "rest"): +def test_update_backup_vault_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16677,83 +17437,21 @@ def test_list_backup_plans_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_backup_plans( - backupplan.ListBackupPlansRequest(), - parent="parent_value", + client.update_backup_vault( + backupvault.UpdateBackupVaultRequest(), + backup_vault=backupvault.BackupVault(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_backup_plans_rest_pager(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - next_page_token="abc", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[], - next_page_token="def", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - ], - next_page_token="ghi", - ), - backupplan.ListBackupPlansResponse( - backup_plans=[ - backupplan.BackupPlan(), - backupplan.BackupPlan(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupplan.ListBackupPlansResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_backup_plans(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, backupplan.BackupPlan) for i in 
results) - - pages = list(client.list_backup_plans(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_delete_backup_vault_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -16761,7 +17459,7 @@ def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_backup_plan in client._transport._wrapped_methods + client._transport.delete_backup_vault in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16770,11 +17468,11 @@ def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_backup_plan + client._transport.delete_backup_vault ] = mock_rpc request = {} - client.delete_backup_plan(request) + client.delete_backup_vault(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -16783,15 +17481,15 @@ def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.delete_backup_plan(request) + client.delete_backup_vault(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_plan_rest_required_fields( - request_type=backupplan.DeleteBackupPlanRequest, +def test_delete_backup_vault_rest_required_fields( + request_type=backupvault.DeleteBackupVaultRequest, ): transport_class = transports.BackupDRRestTransport @@ -16807,7 +17505,7 @@ def test_delete_backup_plan_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_plan._get_unset_required_fields(jsonified_request) + ).delete_backup_vault._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16816,9 +17514,18 @@ def test_delete_backup_plan_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_plan._get_unset_required_fields(jsonified_request) + ).delete_backup_vault._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "force", + "ignore_backup_plan_references", + "request_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16857,23 +17564,35 @@ def test_delete_backup_plan_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_plan(request) + response = client.delete_backup_vault(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_backup_plan_rest_unset_required_fields(): +def test_delete_backup_vault_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup_plan._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.delete_backup_vault._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "force", + "ignoreBackupPlanReferences", + "requestId", + "validateOnly", + ) + ) + & set(("name",)) + ) -def test_delete_backup_plan_rest_flattened(): +def test_delete_backup_vault_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16886,7 +17605,7 @@ def test_delete_backup_plan_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupPlans/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3" } # get truthy value for each flattened field @@ -16903,20 +17622,20 @@ def test_delete_backup_plan_rest_flattened(): req.return_value = response_value 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_plan(**mock_args) + client.delete_backup_vault(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + "%s/v1/{name=projects/*/locations/*/backupVaults/*}" % client.transport._host, args[1], ) -def test_delete_backup_plan_rest_flattened_error(transport: str = "rest"): +def test_delete_backup_vault_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16925,13 +17644,13 @@ def test_delete_backup_plan_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_backup_plan( - backupplan.DeleteBackupPlanRequest(), + client.delete_backup_vault( + backupvault.DeleteBackupVaultRequest(), name="name_value", ) -def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): +def test_list_data_sources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16945,10 +17664,7 @@ def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_backup_plan_association - in client._transport._wrapped_methods - ) + assert client._transport.list_data_sources in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -16956,34 +17672,29 @@ def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in 
compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_backup_plan_association + client._transport.list_data_sources ] = mock_rpc request = {} - client.create_backup_plan_association(request) + client.list_data_sources(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_backup_plan_association(request) + client.list_data_sources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_backup_plan_association_rest_required_fields( - request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +def test_list_data_sources_rest_required_fields( + request_type=backupvault.ListDataSourcesRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} request_init["parent"] = "" - request_init["backup_plan_association_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16991,31 +17702,26 @@ def test_create_backup_plan_association_rest_required_fields( ) # verify fields with default values are dropped - assert "backupPlanAssociationId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + ).list_data_sources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "backupPlanAssociationId" in jsonified_request - assert ( - jsonified_request["backupPlanAssociationId"] - == request_init["backup_plan_association_id"] - ) jsonified_request["parent"] = "parent_value" - 
jsonified_request["backupPlanAssociationId"] = "backup_plan_association_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + ).list_data_sources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "backup_plan_association_id", - "request_id", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) @@ -17023,11 +17729,6 @@ def test_create_backup_plan_association_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" - assert "backupPlanAssociationId" in jsonified_request - assert ( - jsonified_request["backupPlanAssociationId"] - == "backup_plan_association_id_value" - ) client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17036,7 +17737,7 @@ def test_create_backup_plan_association_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListDataSourcesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17048,59 +17749,49 @@ def test_create_backup_plan_association_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_plan_association(request) + response = client.list_data_sources(request) - expected_params = [ - ( - "backupPlanAssociationId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_backup_plan_association_rest_unset_required_fields(): +def test_list_data_sources_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_backup_plan_association._get_unset_required_fields( - {} - ) + unset_fields = transport.list_data_sources._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "backupPlanAssociationId", - "requestId", - ) - ) - & set( - ( - "parent", - "backupPlanAssociationId", - "backupPlanAssociation", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_create_backup_plan_association_rest_flattened(): +def test_list_data_sources_rest_flattened(): client = BackupDRClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17109,43 +17800,43 @@ def test_create_backup_plan_association_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListDataSourcesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } # get truthy value for each flattened field mock_args = dict( parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_plan_association(**mock_args) + client.list_data_sources(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + "%s/v1/{parent=projects/*/locations/*/backupVaults/*}/dataSources" % client.transport._host, args[1], ) -def test_create_backup_plan_association_rest_flattened_error(transport: str = "rest"): +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17154,59 +17845,115 @@ def test_create_backup_plan_association_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_backup_plan_association( - backupplanassociation.CreateBackupPlanAssociationRequest(), + client.list_data_sources( + backupvault.ListDataSourcesRequest(), parent="parent_value", - backup_plan_association=backupplanassociation.BackupPlanAssociation( - name="name_value" - ), - backup_plan_association_id="backup_plan_association_id_value", - ) - - -def test_get_backup_plan_association_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.get_backup_plan_association - in client._transport._wrapped_methods ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.get_backup_plan_association - ] = mock_rpc - request = {} - client.get_backup_plan_association(request) +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + backupvault.DataSource(), + ], + next_page_token="abc", + ), + backupvault.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + ], + next_page_token="ghi", + ), + backupvault.ListDataSourcesResponse( + data_sources=[ + backupvault.DataSource(), + backupvault.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response - client.get_backup_plan_association(request) + # Wrap the values into proper Response objs + response = tuple( + backupvault.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3" + } + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
backupvault.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_data_source_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_source in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_source] = mock_rpc + + request = {} + client.get_data_source(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_backup_plan_association_rest_required_fields( - request_type=backupplanassociation.GetBackupPlanAssociationRequest, +def test_get_data_source_rest_required_fields( + request_type=backupvault.GetDataSourceRequest, ): transport_class = transports.BackupDRRestTransport @@ -17222,7 +17969,7 @@ def test_get_backup_plan_association_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + ).get_data_source._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17231,7 +17978,7 @@ def test_get_backup_plan_association_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + ).get_data_source._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17245,7 +17992,7 @@ def test_get_backup_plan_association_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupplanassociation.BackupPlanAssociation() + return_value = backupvault.DataSource() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17266,30 +18013,30 @@ def test_get_backup_plan_association_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + return_value = backupvault.DataSource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_plan_association(request) + response = client.get_data_source(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_backup_plan_association_rest_unset_required_fields(): +def test_get_data_source_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_backup_plan_association._get_unset_required_fields({}) + unset_fields = transport.get_data_source._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_backup_plan_association_rest_flattened(): +def test_get_data_source_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17298,11 +18045,11 @@ def test_get_backup_plan_association_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupplanassociation.BackupPlanAssociation() + return_value = backupvault.DataSource() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" } # get truthy value for each flattened field @@ -17315,26 +18062,26 @@ def test_get_backup_plan_association_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + return_value = backupvault.DataSource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_plan_association(**mock_args) + client.get_data_source(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*}" % client.transport._host, args[1], ) -def test_get_backup_plan_association_rest_flattened_error(transport: str = "rest"): +def test_get_data_source_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17343,13 +18090,13 @@ def test_get_backup_plan_association_rest_flattened_error(transport: str = "rest # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_backup_plan_association( - backupplanassociation.GetBackupPlanAssociationRequest(), + client.get_data_source( + backupvault.GetDataSourceRequest(), name="name_value", ) -def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): +def test_update_data_source_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17364,8 +18111,7 @@ def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_backup_plan_associations - in client._transport._wrapped_methods + client._transport.update_data_source in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -17374,29 +18120,32 @@ def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_backup_plan_associations + client._transport.update_data_source ] = mock_rpc request = {} - client.list_backup_plan_associations(request) + client.update_data_source(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_backup_plan_associations(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_data_source(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_backup_plan_associations_rest_required_fields( - request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +def test_update_data_source_rest_required_fields( + request_type=backupvault.UpdateDataSourceRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17407,29 +18156,25 @@ def test_list_backup_plan_associations_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + ).update_data_source._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + ).update_data_source._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "page_size", - "page_token", + "allow_missing", + "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17438,7 +18183,7 @@ def test_list_backup_plan_associations_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17450,52 +18195,51 @@ def test_list_backup_plan_associations_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_plan_associations(request) + response = client.update_data_source(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_backup_plan_associations_rest_unset_required_fields(): +def 
test_update_data_source_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_backup_plan_associations._get_unset_required_fields( - {} - ) + unset_fields = transport.update_data_source._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "pageSize", - "pageToken", + "allowMissing", + "requestId", + "updateMask", + ) + ) + & set( + ( + "updateMask", + "dataSource", ) ) - & set(("parent",)) ) -def test_list_backup_plan_associations_rest_flattened(): +def test_update_data_source_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17504,43 +18248,44 @@ def test_list_backup_plan_associations_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_plan_associations(**mock_args) + client.update_data_source(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + "%s/v1/{data_source.name=projects/*/locations/*/backupVaults/*/dataSources/*}" % client.transport._host, args[1], ) -def test_list_backup_plan_associations_rest_flattened_error(transport: str = "rest"): +def test_update_data_source_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17549,130 +18294,54 @@ def test_list_backup_plan_associations_rest_flattened_error(transport: str = "re # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_backup_plan_associations( - backupplanassociation.ListBackupPlanAssociationsRequest(), - parent="parent_value", + client.update_data_source( + backupvault.UpdateDataSourceRequest(), + data_source=backupvault.DataSource(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_backup_plan_associations_rest_pager(transport: str = "rest"): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="abc", - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[], - next_page_token="def", - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - ], - next_page_token="ghi", - ), - backupplanassociation.ListBackupPlanAssociationsResponse( - backup_plan_associations=[ - backupplanassociation.BackupPlanAssociation(), - backupplanassociation.BackupPlanAssociation(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - backupplanassociation.ListBackupPlanAssociationsResponse.to_json(x) - for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_backup_plan_associations(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results - ) - - pages = list(client.list_backup_plan_associations(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_delete_backup_plan_association_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # 
instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_list_backups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_backup_plan_association - in client._transport._wrapped_methods - ) + assert client._transport.list_backups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_backup_plan_association - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_backups] = mock_rpc request = {} - client.delete_backup_plan_association(request) + client.list_backups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_backup_plan_association(request) + client.list_backups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_backup_plan_association_rest_required_fields( - request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, -): +def test_list_backups_rest_required_fields(request_type=backupvault.ListBackupsRequest): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17683,23 +18352,31 @@ def test_delete_backup_plan_association_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) + ).list_backups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17708,7 +18385,7 @@ def test_delete_backup_plan_association_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListBackupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17720,38 +18397,50 @@ def test_delete_backup_plan_association_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_plan_association(request) + response = client.list_backups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_delete_backup_plan_association_rest_unset_required_fields(): +def test_list_backups_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_backup_plan_association._get_unset_required_fields( - {} + unset_fields = transport.list_backups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) ) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) -def test_delete_backup_plan_association_rest_flattened(): +def test_list_backups_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17760,41 +18449,43 @@ def test_delete_backup_plan_association_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListBackupsResponse() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_plan_association(**mock_args) + client.list_backups(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" + "%s/v1/{parent=projects/*/locations/*/backupVaults/*/dataSources/*}/backups" % client.transport._host, args[1], ) -def test_delete_backup_plan_association_rest_flattened_error(transport: str = "rest"): +def test_list_backups_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17803,13 +18494,76 @@ def test_delete_backup_plan_association_rest_flattened_error(transport: str = "r # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_backup_plan_association( - backupplanassociation.DeleteBackupPlanAssociationRequest(), - name="name_value", + client.list_backups( + backupvault.ListBackupsRequest(), + parent="parent_value", ) -def test_trigger_backup_rest_use_cached_wrapped_rpc(): +def test_list_backups_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + backupvault.Backup(), + ], + next_page_token="abc", + ), + backupvault.ListBackupsResponse( + backups=[], + next_page_token="def", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + ], + next_page_token="ghi", + ), + backupvault.ListBackupsResponse( + backups=[ + backupvault.Backup(), + backupvault.Backup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(backupvault.ListBackupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + + pager = client.list_backups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupvault.Backup) for i in results) + + pages = 
list(client.list_backups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17823,40 +18577,33 @@ def test_trigger_backup_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.trigger_backup in client._transport._wrapped_methods + assert client._transport.get_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + client._transport._wrapped_methods[client._transport.get_backup] = mock_rpc request = {} - client.trigger_backup(request) + client.get_backup(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.trigger_backup(request) + client.get_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_trigger_backup_rest_required_fields( - request_type=backupplanassociation.TriggerBackupRequest, -): +def test_get_backup_rest_required_fields(request_type=backupvault.GetBackupRequest): transport_class = transports.BackupDRRestTransport request_init = {} request_init["name"] = "" - request_init["rule_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17867,24 +18614,23 @@ def test_trigger_backup_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).trigger_backup._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" - jsonified_request["ruleId"] = "rule_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).trigger_backup._get_unset_required_fields(jsonified_request) + ).get_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("view",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "ruleId" in jsonified_request - assert jsonified_request["ruleId"] == "rule_id_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17893,7 +18639,7 @@ def test_trigger_backup_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.Backup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17905,45 +18651,39 @@ def test_trigger_backup_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.trigger_backup(request) + response = client.get_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_trigger_backup_rest_unset_required_fields(): +def test_get_backup_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields 
= transport.trigger_backup._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "name", - "ruleId", - ) - ) - ) + unset_fields = transport.get_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) -def test_trigger_backup_rest_flattened(): +def test_get_backup_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17952,42 +18692,43 @@ def test_trigger_backup_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.Backup() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" } # get truthy value for each flattened field mock_args = dict( name="name_value", - rule_id="rule_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupvault.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.trigger_backup(**mock_args) + client.get_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup" + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" % client.transport._host, args[1], ) -def test_trigger_backup_rest_flattened_error(transport: str = "rest"): +def test_get_backup_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17996,14 +18737,13 @@ def test_trigger_backup_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.trigger_backup( - backupplanassociation.TriggerBackupRequest(), + client.get_backup( + backupvault.GetBackupRequest(), name="name_value", - rule_id="rule_id_value", ) -def test_initialize_service_rest_use_cached_wrapped_rpc(): +def test_update_backup_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18017,21 +18757,17 @@ def test_initialize_service_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.initialize_service in client._transport._wrapped_methods - ) + assert client._transport.update_backup in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.initialize_service - ] = mock_rpc + client._transport._wrapped_methods[client._transport.update_backup] = mock_rpc request = {} - client.initialize_service(request) + client.update_backup(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -18040,21 +18776,19 @@ def test_initialize_service_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.initialize_service(request) + client.update_backup(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_initialize_service_rest_required_fields( - request_type=backupdr.InitializeServiceRequest, +def test_update_backup_rest_required_fields( + request_type=backupvault.UpdateBackupRequest, ): transport_class = transports.BackupDRRestTransport request_init = {} - request_init["name"] = "" - request_init["resource_type"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18065,24 +18799,24 @@ def test_initialize_service_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).initialize_service._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - jsonified_request["resourceType"] = "resource_type_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).initialize_service._get_unset_required_fields(jsonified_request) + ).update_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "resourceType" in jsonified_request - assert jsonified_request["resourceType"] == "resource_type_value" client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18103,7 +18837,7 @@ def test_initialize_service_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "patch", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18117,1660 +18851,7538 @@ def test_initialize_service_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.initialize_service(request) + response = client.update_backup(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_initialize_service_rest_unset_required_fields(): +def test_update_backup_rest_unset_required_fields(): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.initialize_service._get_unset_required_fields({}) + unset_fields = transport.update_backup._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "requestId", + "updateMask", + ) + ) & set( ( - "name", - "resourceType", + "updateMask", + "backup", ) ) ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.BackupDRGrpcTransport( +def test_update_backup_rest_flattened(): + client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) + mock_args.update(sample_request) - # It is an error to provide a credentials file and a transport instance. - transport = transports.BackupDRGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = BackupDRClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup.name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], ) - # It is an error to provide an api_key and a transport instance. - transport = transports.BackupDRGrpcTransport( + +def test_update_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = BackupDRClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" + # Attempting to call a method with both a request object and flattened + # fields is an error. with pytest.raises(ValueError): - client = BackupDRClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + client.update_backup( + backupvault.UpdateBackupRequest(), + backup=backupvault.Backup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - # It is an error to provide scopes and a transport instance. 
- transport = transports.BackupDRGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): + +def test_delete_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = BackupDRClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.BackupDRGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = BackupDRClient(transport=transport) - assert client.transport is transport + # Ensure method has been cached + assert client._transport.delete_backup in client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_backup] = mock_rpc -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.BackupDRGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + request = {} + client.delete_backup(request) - transport = transports.BackupDRGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "transport_class", - [ - transports.BackupDRGrpcTransport, - transports.BackupDRGrpcAsyncIOTransport, - transports.BackupDRRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + client.delete_backup(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_transport_kind_grpc(): - transport = BackupDRClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" +def test_delete_backup_rest_required_fields( + request_type=backupvault.DeleteBackupRequest, +): + transport_class = transports.BackupDRRestTransport -def test_initialize_client_w_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - assert client is not None + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_list_management_servers_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_management_servers), "__call__" - ) as call: - call.return_value = backupdr.ListManagementServersResponse() - client.list_management_servers(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.ListManagementServersRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_management_server_empty_call_grpc(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_management_server), "__call__" - ) as call: - call.return_value = backupdr.ManagementServer() - client.get_management_server(request=None) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.GetManagementServerRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_backup(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_management_server_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_management_server(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.CreateManagementServerRequest() +def test_delete_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.delete_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_management_server_empty_call_grpc(): +def test_delete_backup_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_management_server), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_management_server(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.DeleteManagementServerRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_vault_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup_vault(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.CreateBackupVaultRequest() + client.delete_backup(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_vaults_empty_call_grpc(): +def test_delete_backup_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), "__call__" - ) as call: - call.return_value = backupvault.ListBackupVaultsResponse() - client.list_backup_vaults(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_backup( + backupvault.DeleteBackupRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupVaultsRequest() - assert args[0] == request_msg +def test_restore_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_fetch_usable_backup_vaults_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.restore_backup in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), "__call__" - ) as call: - call.return_value = backupvault.FetchUsableBackupVaultsResponse() - client.fetch_usable_backup_vaults(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.restore_backup] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.FetchUsableBackupVaultsRequest() + request = {} + client.restore_backup(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_vault_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + client.restore_backup(request) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: - call.return_value = backupvault.BackupVault() - client.get_backup_vault(request=None) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupVaultRequest() - assert args[0] == request_msg +def test_restore_backup_rest_required_fields( + request_type=backupvault.RestoreBackupRequest, +): + transport_class = transports.BackupDRRestTransport + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_vault_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # verify fields with default values are dropped - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_backup_vault(request=None) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupVaultRequest() + # verify required fields with default values are now present - assert args[0] == request_msg + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).restore_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_vault_empty_call_grpc(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup_vault(request=None) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupVaultRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.restore_backup(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_data_sources_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), "__call__" - ) as call: - call.return_value = backupvault.ListDataSourcesResponse() - client.list_data_sources(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListDataSourcesRequest() +def test_restore_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.restore_backup._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_data_source_empty_call_grpc(): +def test_restore_backup_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: - call.return_value = backupvault.DataSource() - client.get_data_source(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetDataSourceRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_data_source_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_data_source(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateDataSourceRequest() + client.restore_backup(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupVaults/*/dataSources/*/backups/*}:restore" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backups_empty_call_grpc(): +def test_restore_backup_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - call.return_value = backupvault.ListBackupsResponse() - client.list_backups(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupsRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.restore_backup( + backupvault.RestoreBackupRequest(), + name="name_value", + ) - assert args[0] == request_msg +def test_create_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - call.return_value = backupvault.Backup() - client.get_backup(request=None) + # Ensure method has been cached + assert ( + client._transport.create_backup_plan in client._transport._wrapped_methods + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan + ] = mock_rpc - assert args[0] == request_msg + request = {} + client.create_backup_plan(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_backup(request=None) + client.create_backup_plan(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_create_backup_plan_rest_required_fields( + request_type=backupplan.CreateBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup(request=None) + # verify fields with default values are dropped + assert "backupPlanId" not in jsonified_request - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == request_init["backup_plan_id"] + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanId"] = "backup_plan_id_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_restore_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "backup_plan_id", + "request_id", + ) ) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.restore_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.RestoreBackupRequest() - - assert args[0] == request_msg - + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanId" in jsonified_request + assert jsonified_request["backupPlanId"] == "backup_plan_id_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_plan_empty_call_grpc(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup_plan(request=None) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.CreateBackupPlanRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup_plan(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_plan_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [ + ( + "backupPlanId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: - call.return_value = backupplan.BackupPlan() - client.get_backup_plan(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.GetBackupPlanRequest() +def test_create_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.create_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "backupPlanId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanId", + "backupPlan", + ) + ) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_plans_empty_call_grpc(): +def test_create_backup_plan_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: - call.return_value = backupplan.ListBackupPlansResponse() - client.list_backup_plans(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.ListBackupPlansRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_backup_plan_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup_plan(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.DeleteBackupPlanRequest() + client.create_backup_plan(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_backup_plan_association_empty_call_grpc(): +def test_create_backup_plan_rest_flattened_error(transport: str = "rest"): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_backup_plan_association(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_backup_plan( + backupplan.CreateBackupPlanRequest(), + parent="parent_value", + backup_plan=backupplan.BackupPlan(name="name_value"), + backup_plan_id="backup_plan_id_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.CreateBackupPlanAssociationRequest() - assert args[0] == request_msg +def test_update_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_backup_plan_association_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert ( + client._transport.update_backup_plan in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" - ) as call: - call.return_value = backupplanassociation.BackupPlanAssociation() - client.get_backup_plan_association(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backup_plan + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.GetBackupPlanAssociationRequest() + request = {} + client.update_backup_plan(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + client.update_backup_plan(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_backup_plan_associations_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_plan_rest_required_fields( + request_type=backupplan.UpdateBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" - ) as call: - call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - client.list_backup_plan_associations(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.ListBackupPlanAssociationsRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_backup_plan_association_empty_call_grpc(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_backup_plan_association(request=None) + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.DeleteBackupPlanAssociationRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_backup_plan(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_trigger_backup_empty_call_grpc(): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.trigger_backup(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.TriggerBackupRequest() +def test_update_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.update_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "backupPlan", + "updateMask", + ) + ) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_initialize_service_empty_call_grpc(): +def test_update_backup_plan_rest_flattened(): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.initialize_service), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.initialize_service(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.InitializeServiceRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") - assert args[0] == request_msg + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_plan": { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } + } + # get truthy value for each flattened field + mock_args = dict( + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) -def test_transport_kind_grpc_asyncio(): - transport = BackupDRAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backup_plan(**mock_args) -def test_initialize_client_w_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_plan.name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_management_servers_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_update_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_management_servers), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupdr.ListManagementServersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_backup_plan( + backupplan.UpdateBackupPlanRequest(), + backup_plan=backupplan.BackupPlan(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - await client.list_management_servers(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.ListManagementServersRequest() - assert args[0] == request_msg +def test_get_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_management_server_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.get_backup_plan in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_management_server), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupdr.ManagementServer( - name="name_value", - description="description_value", - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag="etag_value", - oauth2_client_id="oauth2_client_id_value", - ba_proxy_uri=["ba_proxy_uri_value"], - satisfies_pzi=True, - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.get_management_server(request=None) + client._transport._wrapped_methods[client._transport.get_backup_plan] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.GetManagementServerRequest() + request = {} + client.get_backup_plan(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.get_backup_plan(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_management_server_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_management_server), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_management_server(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.CreateManagementServerRequest() +def test_get_backup_plan_rest_required_fields( + request_type=backupplan.GetBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport - assert args[0] == request_msg + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_management_server_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_management_server), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_management_server(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.DeleteManagementServerRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_vault), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_backup_vault(request=None) + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.CreateBackupVaultRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_vaults_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response = client.get_backup_plan(request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_vaults), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.ListBackupVaultsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_backup_vaults(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupVaultsRequest() - assert args[0] == request_msg +def test_get_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.get_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_fetch_usable_backup_vaults_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_get_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.fetch_usable_backup_vaults), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.FetchUsableBackupVaultsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.fetch_usable_backup_vaults(request=None) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlan() - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.FetchUsableBackupVaultsRequest() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlan.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + client.get_backup_plan(**mock_args) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.BackupVault( - name="name_value", - description="description_value", - deletable=True, - etag="etag_value", - state=backupvault.BackupVault.State.CREATING, - backup_count=1278, - service_account="service_account_value", - total_stored_bytes=1946, - uid="uid_value", - access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, - ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], ) - await client.get_backup_vault(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupVaultRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_backup_vault), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan( + backupplan.GetBackupPlanRequest(), + name="name_value", ) - await client.update_backup_vault(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupVaultRequest() - assert args[0] == request_msg +def test_list_backup_plans_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_vault_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.list_backup_plans in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_vault), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.delete_backup_vault(request=None) + client._transport._wrapped_methods[ + client._transport.list_backup_plans + ] = mock_rpc - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupVaultRequest() + request = {} + client.list_backup_plans(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_backup_plans(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_data_sources_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_plans_rest_required_fields( + request_type=backupplan.ListBackupPlansRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_data_sources), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.ListDataSourcesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_data_sources(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListDataSourcesRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_data_source_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plans._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) ) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.DataSource( - name="name_value", - state=backupvault.DataSource.State.CREATING, - backup_count=1278, - etag="etag_value", - total_stored_bytes=1946, - config_state=backupvault.BackupConfigState.ACTIVE, - ) - ) - await client.get_data_source(request=None) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetDataSourceRequest() + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_data_source_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_data_source), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_data_source(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateDataSourceRequest() + response = client.list_backup_plans(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backups_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_backup_plans_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_backups), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + unset_fields = transport.list_backup_plans._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - await client.list_backups(request=None) + & set(("parent",)) + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.ListBackupsRequest() - assert args[0] == request_msg +def test_list_backup_plans_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlansResponse() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupvault.Backup( - name="name_value", - description="description_value", - etag="etag_value", - state=backupvault.Backup.State.CREATING, - backup_type=backupvault.Backup.BackupType.SCHEDULED, - resource_size_bytes=2056, - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", ) - await client.get_backup(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.GetBackupRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlansResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.list_backup_plans(**mock_args) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlans" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_list_backup_plans_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_plans( + backupplan.ListBackupPlansRequest(), + parent="parent_value", ) - await client.update_backup(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.UpdateBackupRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_backup_plans_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[], + next_page_token="def", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlansResponse( + backup_plans=[ + backupplan.BackupPlan(), + backupplan.BackupPlan(), + ], + ), ) - await client.delete_backup(request=None) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.DeleteBackupRequest() + # Wrap the values into proper Response objs + response = tuple( + backupplan.ListBackupPlansResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = {"parent": "projects/sample1/locations/sample2"} + pager = client.list_backup_plans(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_restore_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlan) for i in results) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.restore_backup(request=None) + pages = list(client.list_backup_plans(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupvault.RestoreBackupRequest() - assert args[0] == request_msg +def test_delete_backup_plan_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_plan_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.create_backup_plan(request=None) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.CreateBackupPlanRequest() - - assert args[0] == request_msg + request = {} + client.delete_backup_plan(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_plan_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.BackupPlan( - name="name_value", - description="description_value", - state=backupplan.BackupPlan.State.CREATING, - resource_type="resource_type_value", - etag="etag_value", - backup_vault="backup_vault_value", - backup_vault_service_account="backup_vault_service_account_value", - ) - ) - await client.get_backup_plan(request=None) + client.delete_backup_plan(request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.GetBackupPlanRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_delete_backup_plan_rest_required_fields( + request_type=backupplan.DeleteBackupPlanRequest, +): + transport_class = transports.BackupDRRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_plans_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plans), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplan.ListBackupPlansResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_backup_plans(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.ListBackupPlansRequest() + # verify fields with default values are dropped - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_plan_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + jsonified_request["name"] = "name_value" - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_backup_plan), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_backup_plan(request=None) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplan.DeleteBackupPlanRequest() + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - assert args[0] == request_msg + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_backup_plan_association_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_backup_plan_association), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_backup_plan_association(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.CreateBackupPlanAssociationRequest() + response = client.delete_backup_plan(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_backup_plan_association_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_backup_plan_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_backup_plan_association), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.BackupPlanAssociation( - name="name_value", - resource_type="resource_type_value", - resource="resource_value", - backup_plan="backup_plan_value", - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source="data_source_value", - ) - ) - await client.get_backup_plan_association(request=None) + unset_fields = transport.delete_backup_plan._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.GetBackupPlanAssociationRequest() - assert args[0] == request_msg +def test_delete_backup_plan_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_backup_plan_associations_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" + } - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_backup_plan_associations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - backupplanassociation.ListBackupPlanAssociationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.list_backup_plan_associations(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.ListBackupPlanAssociationsRequest() + mock_args.update(sample_request) - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_backup_plan(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_backup_plan_association_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_plan_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_plan( + backupplan.DeleteBackupPlanRequest(), + name="name_value", + ) + + +def test_get_backup_plan_revision_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_revision + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_revision + ] = mock_rpc + + request = {} + client.get_backup_plan_revision(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_revision_rest_required_fields( + request_type=backupplan.GetBackupPlanRevisionRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.BackupPlanRevision() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.BackupPlanRevision.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backup_plan_revision(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_revision_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan_revision._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_backup_plan_revision_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplan.BackupPlanRevision() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3/revisions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.BackupPlanRevision.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backup_plan_revision(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlans/*/revisions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_plan_revision_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan_revision( + backupplan.GetBackupPlanRevisionRequest(), + name="name_value", + ) + + +def test_list_backup_plan_revisions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_plan_revisions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plan_revisions + ] = mock_rpc + + request = {} + client.list_backup_plan_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plan_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_plan_revisions_rest_required_fields( + request_type=backupplan.ListBackupPlanRevisionsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plan_revisions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plan_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplan.ListBackupPlanRevisionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlanRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_backup_plan_revisions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_plan_revisions_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_plan_revisions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_backup_plan_revisions_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplan.ListBackupPlanRevisionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlanRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_backup_plan_revisions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/backupPlans/*}/revisions" + % client.transport._host, + args[1], + ) + + +def test_list_backup_plan_revisions_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_backup_plan_revisions( + backupplan.ListBackupPlanRevisionsRequest(), + parent="parent_value", + ) + + +def test_list_backup_plan_revisions_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + next_page_token="abc", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[], + next_page_token="def", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + ], + next_page_token="ghi", + ), + backupplan.ListBackupPlanRevisionsResponse( + backup_plan_revisions=[ + backupplan.BackupPlanRevision(), + backupplan.BackupPlanRevision(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupplan.ListBackupPlanRevisionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/backupPlans/sample3" + } + + pager = client.list_backup_plan_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, backupplan.BackupPlanRevision) for i in results) + + pages = list(client.list_backup_plan_revisions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_backup_plan_association + ] = mock_rpc + + request = {} + client.create_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["backup_plan_association_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "backupPlanAssociationId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert 
"backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == request_init["backup_plan_association_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["backupPlanAssociationId"] = "backup_plan_association_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_backup_plan_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "backup_plan_association_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "backupPlanAssociationId" in jsonified_request + assert ( + jsonified_request["backupPlanAssociationId"] + == "backup_plan_association_id_value" + ) + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_backup_plan_association(request) + + expected_params = [ + ( + "backupPlanAssociationId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_backup_plan_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "backupPlanAssociationId", + "requestId", + ) + ) + & set( + ( + "parent", + "backupPlanAssociationId", + "backupPlanAssociation", + ) + ) + ) + + +def test_create_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + % client.transport._host, + args[1], + ) + + +def test_create_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_backup_plan_association( + backupplanassociation.CreateBackupPlanAssociationRequest(), + parent="parent_value", + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + backup_plan_association_id="backup_plan_association_id_value", + ) + + +def test_update_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_backup_plan_association + ] = mock_rpc + + request = {} + client.update_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.UpdateBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_backup_plan_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_backup_plan_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_backup_plan_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "backupPlanAssociation", + "updateMask", + ) + ) + ) + + +def test_update_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "backup_plan_association": { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{backup_plan_association.name=projects/*/locations/*/backupPlanAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_update_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_backup_plan_association( + backupplanassociation.UpdateBackupPlanAssociationRequest(), + backup_plan_association=backupplanassociation.BackupPlanAssociation( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_get_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_backup_plan_association + ] = mock_rpc + + request = {} + client.get_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.GetBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.BackupPlanAssociation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_backup_plan_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_backup_plan_association._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupplanassociation.BackupPlanAssociation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_get_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_backup_plan_association( + backupplanassociation.GetBackupPlanAssociationRequest(), + name="name_value", + ) + + +def test_list_backup_plan_associations_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_backup_plan_associations + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_backup_plan_associations + ] = mock_rpc + + request = {} + client.list_backup_plan_associations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_backup_plan_associations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_backup_plan_associations_rest_required_fields( + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_backup_plan_associations._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_backup_plan_associations(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_backup_plan_associations_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_backup_plan_associations._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_backup_plan_associations_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_backup_plan_associations(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations" + % client.transport._host, + args[1], + ) + + +def test_list_backup_plan_associations_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_backup_plan_associations( + backupplanassociation.ListBackupPlanAssociationsRequest(), + parent="parent_value", + ) + + +def test_list_backup_plan_associations_rest_pager(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.ListBackupPlanAssociationsResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupplanassociation.ListBackupPlanAssociationsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = 
client.list_backup_plan_associations(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) + + pages = list(client.list_backup_plan_associations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_fetch_backup_plan_associations_for_resource_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_backup_plan_associations_for_resource_type + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_backup_plan_associations_for_resource_type + ] = mock_rpc + + request = {} + client.fetch_backup_plan_associations_for_resource_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_backup_plan_associations_for_resource_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_backup_plan_associations_for_resource_type_rest_required_fields( + request_type=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["resource_type"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "resourceType" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_backup_plan_associations_for_resource_type._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == request_init["resource_type"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["resourceType"] = "resource_type_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_backup_plan_associations_for_resource_type._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "resource_type", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == "resource_type_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.fetch_backup_plan_associations_for_resource_type(request) + + expected_params = [ + ( + "resourceType", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_backup_plan_associations_for_resource_type_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_backup_plan_associations_for_resource_type._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "resourceType", + ) + ) + & set( + ( + "parent", + "resourceType", + ) + ) + ) + + +def test_fetch_backup_plan_associations_for_resource_type_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + resource_type="resource_type_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.fetch_backup_plan_associations_for_resource_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/backupPlanAssociations:fetchForResourceType" + % client.transport._host, + args[1], + ) + + +def test_fetch_backup_plan_associations_for_resource_type_rest_flattened_error( + transport: str = "rest", +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_backup_plan_associations_for_resource_type( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest(), + parent="parent_value", + resource_type="resource_type_value", + ) + + +def test_fetch_backup_plan_associations_for_resource_type_rest_pager( + transport: str = "rest", +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="abc", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[], + next_page_token="def", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + ], + next_page_token="ghi", + ), + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + backup_plan_associations=[ + backupplanassociation.BackupPlanAssociation(), + backupplanassociation.BackupPlanAssociation(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = 
response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.fetch_backup_plan_associations_for_resource_type( + request=sample_request + ) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, backupplanassociation.BackupPlanAssociation) for i in results + ) + + pages = list( + client.fetch_backup_plan_associations_for_resource_type( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_backup_plan_association_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_backup_plan_association + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_backup_plan_association + ] = mock_rpc + + request = {} + client.delete_backup_plan_association(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_backup_plan_association(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_backup_plan_association_rest_required_fields( + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_backup_plan_association._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_backup_plan_association(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_backup_plan_association_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_backup_plan_association._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + + +def test_delete_backup_plan_association_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_backup_plan_association(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_backup_plan_association_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_backup_plan_association( + backupplanassociation.DeleteBackupPlanAssociationRequest(), + name="name_value", + ) + + +def test_trigger_backup_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.trigger_backup in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.trigger_backup] = mock_rpc + + request = {} + client.trigger_backup(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.trigger_backup(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_trigger_backup_rest_required_fields( + request_type=backupplanassociation.TriggerBackupRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request_init["rule_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).trigger_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["ruleId"] = "rule_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).trigger_backup._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "ruleId" in jsonified_request + assert jsonified_request["ruleId"] == "rule_id_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.trigger_backup(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_trigger_backup_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.trigger_backup._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "ruleId", + ) + ) + ) + + +def test_trigger_backup_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + rule_id="rule_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.trigger_backup(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/backupPlanAssociations/*}:triggerBackup" + % client.transport._host, + args[1], + ) + + +def test_trigger_backup_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.trigger_backup( + backupplanassociation.TriggerBackupRequest(), + name="name_value", + rule_id="rule_id_value", + ) + + +def test_get_data_source_reference_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_data_source_reference + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_data_source_reference + ] = mock_rpc + + request = {} + client.get_data_source_reference(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_source_reference(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_source_reference_rest_required_fields( + request_type=datasourcereference.GetDataSourceReferenceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source_reference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source_reference._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datasourcereference.DataSourceReference() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasourcereference.DataSourceReference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_source_reference(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_reference_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_source_reference._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_data_source_reference_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datasourcereference.DataSourceReference() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataSourceReferences/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = datasourcereference.DataSourceReference.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_source_reference(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataSourceReferences/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_reference_rest_flattened_error(transport: str = "rest"): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_source_reference( + datasourcereference.GetDataSourceReferenceRequest(), + name="name_value", + ) + + +def test_fetch_data_source_references_for_resource_type_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.fetch_data_source_references_for_resource_type + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.fetch_data_source_references_for_resource_type + ] = mock_rpc + + request = {} + client.fetch_data_source_references_for_resource_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.fetch_data_source_references_for_resource_type(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_fetch_data_source_references_for_resource_type_rest_required_fields( + request_type=datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["resource_type"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "resourceType" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_data_source_references_for_resource_type._get_unset_required_fields( + jsonified_request + ) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == request_init["resource_type"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["resourceType"] = "resource_type_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).fetch_data_source_references_for_resource_type._get_unset_required_fields( + jsonified_request + ) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + "resource_type", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == "resource_type_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.fetch_data_source_references_for_resource_type(request) + + expected_params = [ + ( + "resourceType", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_fetch_data_source_references_for_resource_type_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.fetch_data_source_references_for_resource_type._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + "resourceType", + ) + ) + & set( + ( + "parent", + "resourceType", + ) + ) + ) + + +def test_fetch_data_source_references_for_resource_type_rest_flattened(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + resource_type="resource_type_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.fetch_data_source_references_for_resource_type(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dataSourceReferences:fetchForResourceType" + % client.transport._host, + args[1], + ) + + +def test_fetch_data_source_references_for_resource_type_rest_flattened_error( + transport: str = "rest", +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.fetch_data_source_references_for_resource_type( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest(), + parent="parent_value", + resource_type="resource_type_value", + ) + + +def test_fetch_data_source_references_for_resource_type_rest_pager( + transport: str = "rest", +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + next_page_token="abc", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[], + next_page_token="def", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + ], + next_page_token="ghi", + ), + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + data_source_references=[ + datasourcereference.DataSourceReference(), + datasourcereference.DataSourceReference(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.to_json( + x + ) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + 
req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.fetch_data_source_references_for_resource_type( + request=sample_request + ) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, datasourcereference.DataSourceReference) for i in results + ) + + pages = list( + client.fetch_data_source_references_for_resource_type( + request=sample_request + ).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_initialize_service_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.initialize_service in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.initialize_service + ] = mock_rpc + + request = {} + client.initialize_service(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.initialize_service(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_initialize_service_rest_required_fields( + request_type=backupdr.InitializeServiceRequest, +): + transport_class = transports.BackupDRRestTransport + + request_init = {} + request_init["name"] = "" + request_init["resource_type"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).initialize_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["resourceType"] = "resource_type_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).initialize_service._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "resourceType" in jsonified_request + assert jsonified_request["resourceType"] == "resource_type_value" + + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.initialize_service(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_initialize_service_rest_unset_required_fields(): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.initialize_service._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "resourceType", + ) + ) + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDRClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackupDRClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BackupDRClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BackupDRClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BackupDRClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.BackupDRGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BackupDRGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BackupDRGrpcTransport, + transports.BackupDRGrpcAsyncIOTransport, + transports.BackupDRRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = BackupDRClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_management_servers_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_management_servers), "__call__" + ) as call: + call.return_value = backupdr.ListManagementServersResponse() + client.list_management_servers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.ListManagementServersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_management_server_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_management_server), "__call__" + ) as call: + call.return_value = backupdr.ManagementServer() + client.get_management_server(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.GetManagementServerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_management_server_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_management_server), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_management_server(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.CreateManagementServerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_management_server_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_management_server), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_management_server(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.DeleteManagementServerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_vault_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.CreateBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_vaults_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.ListBackupVaultsResponse() + client.list_backup_vaults(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.ListBackupVaultsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_usable_backup_vaults_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + call.return_value = backupvault.FetchUsableBackupVaultsResponse() + client.fetch_usable_backup_vaults(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.FetchUsableBackupVaultsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_vault_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + call.return_value = backupvault.BackupVault() + client.get_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.GetBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_vault_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.UpdateBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_vault_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.DeleteBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_sources_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + call.return_value = backupvault.ListDataSourcesResponse() + client.list_data_sources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.ListDataSourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_data_source_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + call.return_value = backupvault.DataSource() + client.get_data_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.GetDataSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_source_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_data_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.UpdateDataSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backups_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + call.return_value = backupvault.ListBackupsResponse() + client.list_backups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + call.return_value = backupvault.Backup() + client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_restore_backup_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.restore_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.RestoreBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_plan_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.CreateBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_backup_plan_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_plan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.UpdateBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_plan_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + call.return_value = backupplan.BackupPlan() + client.get_backup_plan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.GetBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_plans_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + call.return_value = backupplan.ListBackupPlansResponse() + client.list_backup_plans(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.ListBackupPlansRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_plan_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.DeleteBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_plan_revision_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_revision), "__call__" + ) as call: + call.return_value = backupplan.BackupPlanRevision() + client.get_backup_plan_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.GetBackupPlanRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_backup_plan_revisions_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_revisions), "__call__" + ) as call: + call.return_value = backupplan.ListBackupPlanRevisionsResponse() + client.list_backup_plan_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.ListBackupPlanRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_backup_plan_association_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.CreateBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_plan_association_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.UpdateBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_plan_association_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + call.return_value = backupplanassociation.BackupPlanAssociation() + client.get_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.GetBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_backup_plan_associations_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + call.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + client.list_backup_plan_associations(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.ListBackupPlanAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_backup_plan_associations_for_resource_type_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + call.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + client.fetch_backup_plan_associations_for_resource_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_backup_plan_association_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.DeleteBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_trigger_backup_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.trigger_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.TriggerBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_source_reference_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + call.return_value = datasourcereference.DataSourceReference() + client.get_data_source_reference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datasourcereference.GetDataSourceReferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_data_source_references_for_resource_type_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + call.return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + client.fetch_data_source_references_for_resource_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_initialize_service_empty_call_grpc(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.initialize_service(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.InitializeServiceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = BackupDRAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_management_servers_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_management_servers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupdr.ListManagementServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_management_servers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.ListManagementServersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_management_server_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_management_server), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupdr.ManagementServer( + name="name_value", + description="description_value", + type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, + state=backupdr.ManagementServer.InstanceState.CREATING, + etag="etag_value", + oauth2_client_id="oauth2_client_id_value", + ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, + ) + ) + await client.get_management_server(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.GetManagementServerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_management_server_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_management_server), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_management_server(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.CreateManagementServerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_management_server_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_management_server), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_management_server(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.DeleteManagementServerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_vault_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.CreateBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_vaults_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_backup_vaults(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.ListBackupVaultsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_fetch_usable_backup_vaults_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_usable_backup_vaults), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.fetch_usable_backup_vaults(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.FetchUsableBackupVaultsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_vault_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup_vault), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + ) + ) + await client.get_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.GetBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_vault_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.UpdateBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_vault_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_backup_vault), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_backup_vault(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.DeleteBackupVaultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_sources_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_sources), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_data_sources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.ListDataSourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_source_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_source), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + backup_blocked_by_vault_access_restriction=True, + ) + ) + await client.get_data_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.GetDataSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_source_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_source), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_data_source(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.UpdateDataSourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_backups_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_backups), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.ListBackupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_backups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.ListBackupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupvault.Backup( + name="name_value", + description="description_value", + etag="etag_value", + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + satisfies_pzs=True, + satisfies_pzi=True, + ) + ) + await client.get_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.GetBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.UpdateBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.DeleteBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_restore_backup_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.restore_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.restore_backup(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupvault.RestoreBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_plan_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_backup_plan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.CreateBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_backup_plan_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_backup_plan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.UpdateBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_plan_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_backup_plan), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + log_retention_days=1929, + supported_resource_types=["supported_resource_types_value"], + revision_id="revision_id_value", + revision_name="revision_name_value", + ) + ) + await client.get_backup_plan(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.GetBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_plans_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plans), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_backup_plans(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.ListBackupPlansRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_plan_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_backup_plan(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.DeleteBackupPlanRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_plan_revision_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.BackupPlanRevision( + name="name_value", + revision_id="revision_id_value", + state=backupplan.BackupPlanRevision.State.CREATING, + ) + ) + await client.get_backup_plan_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.GetBackupPlanRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_backup_plan_revisions_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplan.ListBackupPlanRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_backup_plan_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.ListBackupPlanRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_backup_plan_association_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.CreateBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_backup_plan_association_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.UpdateBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_backup_plan_association_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + backup_plan_revision_id="backup_plan_revision_id_value", + backup_plan_revision_name="backup_plan_revision_name_value", + ) + ) + await client.get_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.GetBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_backup_plan_associations_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) - # Mock the actual call, and fake the request. + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_associations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_backup_plan_associations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.ListBackupPlanAssociationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_fetch_backup_plan_associations_for_resource_type_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) + await client.fetch_backup_plan_associations_for_resource_type(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_backup_plan_association_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_backup_plan_association), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.DeleteBackupPlanAssociationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_trigger_backup_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.trigger_backup(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.TriggerBackupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_source_reference_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.DataSourceReference( + name="name_value", + data_source="data_source_value", + data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, + data_source_backup_count=2535, + ) + ) + await client.get_data_source_reference(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datasourcereference.GetDataSourceReferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_fetch_data_source_references_for_resource_type_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) + await client.fetch_data_source_references_for_resource_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() + ) + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_initialize_service_empty_call_grpc_asyncio(): + client = BackupDRAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.initialize_service), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.initialize_service(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupdr.InitializeServiceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = BackupDRClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_management_servers_rest_bad_request( + request_type=backupdr.ListManagementServersRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_management_servers(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.ListManagementServersRequest, + dict, + ], +) +def test_list_management_servers_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupdr.ListManagementServersResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ListManagementServersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_management_servers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListManagementServersPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_management_servers_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_management_servers" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_management_servers_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_management_servers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupdr.ListManagementServersRequest.pb( + backupdr.ListManagementServersRequest() + ) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backupdr.ListManagementServersResponse.to_json( + backupdr.ListManagementServersResponse() + ) + req.return_value.content = return_value + + request = backupdr.ListManagementServersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ListManagementServersResponse() + post_with_metadata.return_value = ( + backupdr.ListManagementServersResponse(), + metadata, + ) + + client.list_management_servers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_management_server_rest_bad_request( + request_type=backupdr.GetManagementServerRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_management_server(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.GetManagementServerRequest, + dict, + ], +) +def test_get_management_server_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = backupdr.ManagementServer( + name="name_value", + description="description_value", + type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, + state=backupdr.ManagementServer.InstanceState.CREATING, + etag="etag_value", + oauth2_client_id="oauth2_client_id_value", + ba_proxy_uri=["ba_proxy_uri_value"], + satisfies_pzi=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupdr.ManagementServer.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_management_server(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, backupdr.ManagementServer) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE + assert response.state == backupdr.ManagementServer.InstanceState.CREATING + assert response.etag == "etag_value" + assert response.oauth2_client_id == "oauth2_client_id_value" + assert response.ba_proxy_uri == ["ba_proxy_uri_value"] + assert response.satisfies_pzi is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, 
"post_get_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_management_server_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupdr.GetManagementServerRequest.pb( + backupdr.GetManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backupdr.ManagementServer.to_json(backupdr.ManagementServer()) + req.return_value.content = return_value + + request = backupdr.GetManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupdr.ManagementServer() + post_with_metadata.return_value = backupdr.ManagementServer(), metadata + + client.get_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_management_server_rest_bad_request( + request_type=backupdr.CreateManagementServerRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_management_server(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.CreateManagementServerRequest, + dict, + ], +) +def test_create_management_server_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["management_server"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "type_": 1, + "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, + "workforce_identity_based_management_uri": { + "first_party_management_uri": "first_party_management_uri_value", + "third_party_management_uri": "third_party_management_uri_value", + }, + "state": 1, + "networks": [{"network": "network_value", "peering_mode": 1}], + "etag": "etag_value", + "oauth2_client_id": "oauth2_client_id_value", + "workforce_identity_based_oauth2_client_id": { + "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", + "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", + }, + "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], + "satisfies_pzs": {"value": True}, + "satisfies_pzi": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["management_server"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version 
of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["management_server"][field])): + del request_init["management_server"][field][i][subfield] + else: + del request_init["management_server"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_management_server(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_create_management_server_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupdr.CreateManagementServerRequest.pb( + backupdr.CreateManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = backupdr.CreateManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + 
pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_management_server_rest_bad_request( + request_type=backupdr.DeleteManagementServerRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_management_server(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupdr.DeleteManagementServerRequest, + dict, + ], +) +def test_delete_management_server_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/managementServers/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_management_server(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_management_server_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_management_server" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_delete_management_server_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_delete_management_server" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupdr.DeleteManagementServerRequest.pb( + backupdr.DeleteManagementServerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + 
return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = backupdr.DeleteManagementServerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_management_server( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_backup_vault_rest_bad_request( + request_type=backupvault.CreateBackupVaultRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_backup_vault(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.CreateBackupVaultRequest, + dict, + ], +) +def test_create_backup_vault_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_vault"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.CreateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del 
request_init["backup_vault"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_backup_vault(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_vault_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_create_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupvault.CreateBackupVaultRequest.pb( + backupvault.CreateBackupVaultRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = backupvault.CreateBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_backup_vaults_rest_bad_request( + request_type=backupvault.ListBackupVaultsRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_backup_vaults(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListBackupVaultsRequest, + dict, + ], +) +def test_list_backup_vaults_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.ListBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + with mock.patch.object( - type(client.transport.delete_backup_plan_association), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_list_backup_vaults_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_list_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupvault.ListBackupVaultsRequest.pb( + backupvault.ListBackupVaultsRequest() ) - await client.delete_backup_plan_association(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.DeleteBackupPlanAssociationRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backupvault.ListBackupVaultsResponse.to_json( + backupvault.ListBackupVaultsResponse() + ) + req.return_value.content = return_value + + request = backupvault.ListBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.ListBackupVaultsResponse() + post_with_metadata.return_value = ( + backupvault.ListBackupVaultsResponse(), + metadata, + ) + + client.list_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_fetch_usable_backup_vaults_rest_bad_request( + request_type=backupvault.FetchUsableBackupVaultsRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.fetch_usable_backup_vaults(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.FetchUsableBackupVaultsRequest, + dict, + ], +) +def test_fetch_usable_backup_vaults_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.FetchUsableBackupVaultsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.fetch_usable_backup_vaults(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), + ) + client = BackupDRClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, + "post_fetch_usable_backup_vaults_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_fetch_usable_backup_vaults" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupvault.FetchUsableBackupVaultsRequest.pb( + backupvault.FetchUsableBackupVaultsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backupvault.FetchUsableBackupVaultsResponse.to_json( + backupvault.FetchUsableBackupVaultsResponse() + ) + req.return_value.content = return_value + + request = backupvault.FetchUsableBackupVaultsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.FetchUsableBackupVaultsResponse() + post_with_metadata.return_value = ( + 
backupvault.FetchUsableBackupVaultsResponse(), + metadata, + ) + + client.fetch_usable_backup_vaults( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_backup_vault_rest_bad_request( + request_type=backupvault.GetBackupVaultRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) - assert args[0] == request_msg + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_backup_vault(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_trigger_backup_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +@pytest.mark.parametrize( + "request_type", + [ + backupvault.GetBackupVaultRequest, + dict, + ], +) +def test_get_backup_vault_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.trigger_backup), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = backupvault.BackupVault( + name="name_value", + description="description_value", + deletable=True, + etag="etag_value", + state=backupvault.BackupVault.State.CREATING, + backup_count=1278, + service_account="service_account_value", + total_stored_bytes=1946, + uid="uid_value", + access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, ) - await client.trigger_backup(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupplanassociation.TriggerBackupRequest() + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = backupvault.BackupVault.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_backup_vault(request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, backupvault.BackupVault) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.deletable is True + assert response.etag == "etag_value" + assert response.state == backupvault.BackupVault.State.CREATING + assert response.backup_count == 1278 + assert response.service_account == "service_account_value" + assert response.total_stored_bytes == 1946 + assert response.uid == "uid_value" + assert ( + response.access_restriction + == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_initialize_service_empty_call_grpc_asyncio(): - client = BackupDRAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_backup_vault_rest_interceptors(null_interceptor): + transport = transports.BackupDRRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), ) + client = BackupDRClient(transport=transport) - # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.initialize_service), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_vault" + ) as post, mock.patch.object( + transports.BackupDRRestInterceptor, "post_get_backup_vault_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.BackupDRRestInterceptor, "pre_get_backup_vault" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = backupvault.GetBackupVaultRequest.pb( + backupvault.GetBackupVaultRequest() ) - await client.initialize_service(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = backupdr.InitializeServiceRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = backupvault.BackupVault.to_json(backupvault.BackupVault()) + req.return_value.content = return_value - assert args[0] == request_msg + request = backupvault.GetBackupVaultRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = backupvault.BackupVault() + post_with_metadata.return_value = backupvault.BackupVault(), metadata + client.get_backup_vault( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = BackupDRClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + 
post_with_metadata.assert_called_once() -def test_list_management_servers_rest_bad_request( - request_type=backupdr.ListManagementServersRequest, +def test_update_backup_vault_rest_bad_request( + request_type=backupvault.UpdateBackupVaultRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_management_servers(request) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_backup_vault(request) + + +@pytest.mark.parametrize( + "request_type", + [ + backupvault.UpdateBackupVaultRequest, + dict, + ], +) +def test_update_backup_vault_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "backup_vault": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3" + } + } + request_init["backup_vault"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, + "deletable": True, + "etag": "etag_value", + "state": 1, + "effective_time": {}, + "backup_count": 1278, + "service_account": "service_account_value", + "total_stored_bytes": 1946, + "uid": "uid_value", + "annotations": {}, + "access_restriction": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupvault.UpdateBackupVaultRequest.meta.fields["backup_vault"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - backupdr.ListManagementServersRequest, - dict, - ], -) -def test_list_management_servers_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + 
if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_vault"][field])): + del request_init["backup_vault"][field][i][subfield] + else: + del request_init["backup_vault"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupdr.ListManagementServersResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupdr.ListManagementServersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_management_servers(request) + response = client.update_backup_vault(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListManagementServersPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_management_servers_rest_interceptors(null_interceptor): +def test_update_backup_vault_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -19782,17 +26394,19 @@ def test_list_management_servers_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_management_servers" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_vault" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_management_servers_with_metadata" + transports.BackupDRRestInterceptor, "post_update_backup_vault_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_management_servers" + transports.BackupDRRestInterceptor, "pre_update_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupdr.ListManagementServersRequest.pb( - backupdr.ListManagementServersRequest() + pb_message = backupvault.UpdateBackupVaultRequest.pb( + backupvault.UpdateBackupVaultRequest() ) transcode.return_value = { "method": "post", @@ -19804,24 +26418,19 @@ def test_list_management_servers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
backupdr.ListManagementServersResponse.to_json( - backupdr.ListManagementServersResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupdr.ListManagementServersRequest() + request = backupvault.UpdateBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ListManagementServersResponse() - post_with_metadata.return_value = ( - backupdr.ListManagementServersResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_management_servers( + client.update_backup_vault( request, metadata=[ ("key", "val"), @@ -19834,16 +26443,14 @@ def test_list_management_servers_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_management_server_rest_bad_request( - request_type=backupdr.GetManagementServerRequest, +def test_delete_backup_vault_rest_bad_request( + request_type=backupvault.DeleteBackupVaultRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19858,67 +26465,45 @@ def test_get_management_server_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_management_server(request) + client.delete_backup_vault(request) @pytest.mark.parametrize( "request_type", [ - backupdr.GetManagementServerRequest, + backupvault.DeleteBackupVaultRequest, dict, ], ) -def test_get_management_server_rest_call_success(request_type): +def test_delete_backup_vault_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" - } + request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupdr.ManagementServer( - name="name_value", - description="description_value", - type_=backupdr.ManagementServer.InstanceType.BACKUP_RESTORE, - state=backupdr.ManagementServer.InstanceState.CREATING, - etag="etag_value", - oauth2_client_id="oauth2_client_id_value", - ba_proxy_uri=["ba_proxy_uri_value"], - satisfies_pzi=True, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupdr.ManagementServer.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_management_server(request) + response = client.delete_backup_vault(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupdr.ManagementServer) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.type_ == backupdr.ManagementServer.InstanceType.BACKUP_RESTORE - assert response.state == backupdr.ManagementServer.InstanceState.CREATING - assert response.etag == "etag_value" - assert response.oauth2_client_id == "oauth2_client_id_value" - assert response.ba_proxy_uri == ["ba_proxy_uri_value"] - assert response.satisfies_pzi is True + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_management_server_rest_interceptors(null_interceptor): +def test_delete_backup_vault_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -19930,17 +26515,19 @@ def test_get_management_server_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_management_server" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_vault" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_management_server_with_metadata" + transports.BackupDRRestInterceptor, "post_delete_backup_vault_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_management_server" + transports.BackupDRRestInterceptor, "pre_delete_backup_vault" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupdr.GetManagementServerRequest.pb( - backupdr.GetManagementServerRequest() + pb_message = backupvault.DeleteBackupVaultRequest.pb( + backupvault.DeleteBackupVaultRequest() ) 
transcode.return_value = { "method": "post", @@ -19952,183 +26539,100 @@ def test_get_management_server_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupdr.ManagementServer.to_json(backupdr.ManagementServer()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupdr.GetManagementServerRequest() + request = backupvault.DeleteBackupVaultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupdr.ManagementServer() - post_with_metadata.return_value = backupdr.ManagementServer(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_management_server( + client.delete_backup_vault( request, metadata=[ ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_create_management_server_rest_bad_request( - request_type=backupdr.CreateManagementServerRequest, -): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_management_server(request) - - -@pytest.mark.parametrize( - "request_type", - [ - backupdr.CreateManagementServerRequest, - dict, - ], -) -def test_create_management_server_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["management_server"] = { - "name": "name_value", - "description": "description_value", - "labels": {}, - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "type_": 1, - "management_uri": {"web_ui": "web_ui_value", "api": "api_value"}, - "workforce_identity_based_management_uri": { - "first_party_management_uri": "first_party_management_uri_value", - "third_party_management_uri": "third_party_management_uri_value", - }, - "state": 1, - "networks": [{"network": "network_value", "peering_mode": 1}], - "etag": "etag_value", - "oauth2_client_id": "oauth2_client_id_value", - "workforce_identity_based_oauth2_client_id": { - "first_party_oauth2_client_id": "first_party_oauth2_client_id_value", - "third_party_oauth2_client_id": "third_party_oauth2_client_id_value", - }, - "ba_proxy_uri": ["ba_proxy_uri_value1", "ba_proxy_uri_value2"], - "satisfies_pzs": {"value": True}, - "satisfies_pzi": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupdr.CreateManagementServerRequest.meta.fields["management_server"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + ("cephalopod", "squid"), + ], + ) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_list_data_sources_rest_bad_request( + request_type=backupvault.ListDataSourcesRequest, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request = request_type(**request_init) - subfields_not_in_runtime = [] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_sources(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["management_server"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backupvault.ListDataSourcesRequest, + dict, + ], +) +def test_list_data_sources_rest_call_success(request_type): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in 
range(0, len(request_init["management_server"][field])): - del request_init["management_server"][field][i][subfield] - else: - del request_init["management_server"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.ListDataSourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.ListDataSourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_management_server(request) + response = client.list_data_sources(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_management_server_rest_interceptors(null_interceptor): +def test_list_data_sources_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -20140,20 +26644,17 @@ def test_create_management_server_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_management_server" + transports.BackupDRRestInterceptor, "post_list_data_sources" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, - "post_create_management_server_with_metadata", + transports.BackupDRRestInterceptor, "post_list_data_sources_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_management_server" + transports.BackupDRRestInterceptor, "pre_list_data_sources" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupdr.CreateManagementServerRequest.pb( - backupdr.CreateManagementServerRequest() + pb_message = backupvault.ListDataSourcesRequest.pb( + backupvault.ListDataSourcesRequest() ) transcode.return_value = { "method": "post", @@ -20165,19 +26666,24 @@ def test_create_management_server_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
json_format.MessageToJson(operations_pb2.Operation()) + return_value = backupvault.ListDataSourcesResponse.to_json( + backupvault.ListDataSourcesResponse() + ) req.return_value.content = return_value - request = backupdr.CreateManagementServerRequest() + request = backupvault.ListDataSourcesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backupvault.ListDataSourcesResponse() + post_with_metadata.return_value = ( + backupvault.ListDataSourcesResponse(), + metadata, + ) - client.create_management_server( + client.list_data_sources( request, metadata=[ ("key", "val"), @@ -20190,15 +26696,15 @@ def test_create_management_server_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_management_server_rest_bad_request( - request_type=backupdr.DeleteManagementServerRequest, +def test_get_data_source_rest_bad_request( + request_type=backupvault.GetDataSourceRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" } request = request_type(**request_init) @@ -20214,47 +26720,65 @@ def test_delete_management_server_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_management_server(request) + client.get_data_source(request) @pytest.mark.parametrize( "request_type", [ - backupdr.DeleteManagementServerRequest, + backupvault.GetDataSourceRequest, dict, ], ) -def test_delete_management_server_rest_call_success(request_type): +def 
test_get_data_source_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/managementServers/sample3" + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupvault.DataSource( + name="name_value", + state=backupvault.DataSource.State.CREATING, + backup_count=1278, + etag="etag_value", + total_stored_bytes=1946, + config_state=backupvault.BackupConfigState.ACTIVE, + backup_blocked_by_vault_access_restriction=True, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupvault.DataSource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_management_server(request) + response = client.get_data_source(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, backupvault.DataSource) + assert response.name == "name_value" + assert response.state == backupvault.DataSource.State.CREATING + assert response.backup_count == 1278 + assert response.etag == "etag_value" + assert response.total_stored_bytes == 1946 + assert response.config_state == backupvault.BackupConfigState.ACTIVE + assert response.backup_blocked_by_vault_access_restriction is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_management_server_rest_interceptors(null_interceptor): +def test_get_data_source_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -20266,20 +26790,17 @@ def test_delete_management_server_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_management_server" + transports.BackupDRRestInterceptor, "post_get_data_source" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, - "post_delete_management_server_with_metadata", + transports.BackupDRRestInterceptor, "post_get_data_source_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_management_server" + transports.BackupDRRestInterceptor, "pre_get_data_source" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupdr.DeleteManagementServerRequest.pb( - backupdr.DeleteManagementServerRequest() + pb_message = backupvault.GetDataSourceRequest.pb( + backupvault.GetDataSourceRequest() ) transcode.return_value = { "method": "post", @@ -20291,19 +26812,19 @@ def 
test_delete_management_server_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backupvault.DataSource.to_json(backupvault.DataSource()) req.return_value.content = return_value - request = backupdr.DeleteManagementServerRequest() + request = backupvault.GetDataSourceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backupvault.DataSource() + post_with_metadata.return_value = backupvault.DataSource(), metadata - client.delete_management_server( + client.get_data_source( request, metadata=[ ("key", "val"), @@ -20316,14 +26837,18 @@ def test_delete_management_server_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_backup_vault_rest_bad_request( - request_type=backupvault.CreateBackupVaultRequest, +def test_update_data_source_rest_bad_request( + request_type=backupvault.UpdateDataSourceRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -20338,47 +26863,112 @@ def test_create_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_vault(request) + client.update_data_source(request) @pytest.mark.parametrize( "request_type", [ - backupvault.CreateBackupVaultRequest, + backupvault.UpdateDataSourceRequest, dict, ], ) -def test_create_backup_vault_rest_call_success(request_type): +def test_update_data_source_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["backup_vault"] = { - "name": "name_value", - "description": "description_value", + request_init = { + "data_source": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } + } + request_init["data_source"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4", + "state": 1, "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, - "deletable": True, - "etag": "etag_value", - "state": 1, - "effective_time": {}, "backup_count": 1278, - "service_account": "service_account_value", + "etag": "etag_value", "total_stored_bytes": 1946, - "uid": "uid_value", - "annotations": {}, - "access_restriction": 1, + "config_state": 1, + "backup_config_info": { + "last_backup_state": 1, + "last_successful_backup_consistency_time": {}, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "gcp_backup_config": { + "backup_plan": "backup_plan_value", + "backup_plan_description": 
"backup_plan_description_value", + "backup_plan_association": "backup_plan_association_value", + "backup_plan_rules": [ + "backup_plan_rules_value1", + "backup_plan_rules_value2", + ], + "backup_plan_revision_name": "backup_plan_revision_name_value", + "backup_plan_revision_id": "backup_plan_revision_id_value", + }, + "backup_appliance_backup_config": { + "backup_appliance_name": "backup_appliance_name_value", + "backup_appliance_id": 1966, + "sla_id": 620, + "application_name": "application_name_value", + "host_name": "host_name_value", + "slt_name": "slt_name_value", + "slp_name": "slp_name_value", + }, + }, + "data_source_gcp_resource": { + "gcp_resourcename": "gcp_resourcename_value", + "location": "location_value", + "type_": "type__value", + "compute_instance_datasource_properties": { + "name": "name_value", + "description": "description_value", + "machine_type": "machine_type_value", + "total_disk_count": 1718, + "total_disk_size_gb": 1904, + }, + "cloud_sql_instance_datasource_properties": { + "name": "name_value", + "database_installed_version": "database_installed_version_value", + "instance_create_time": {}, + "instance_tier": "instance_tier_value", + }, + "disk_datasource_properties": { + "name": "name_value", + "description": "description_value", + "type_": "type__value", + "size_gb": 739, + }, + }, + "data_source_backup_appliance_application": { + "application_name": "application_name_value", + "backup_appliance": "backup_appliance_value", + "appliance_id": 1241, + "type_": "type__value", + "application_id": 1472, + "hostname": "hostname_value", + "host_id": 746, + }, + "backup_blocked_by_vault_access_restriction": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = backupvault.CreateBackupVaultRequest.meta.fields["backup_vault"] + test_field = backupvault.UpdateDataSourceRequest.meta.fields["data_source"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20406,7 +26996,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + for field, value in request_init["data_source"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -20436,10 +27026,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["backup_vault"][field])): - del request_init["backup_vault"][field][i][subfield] + for i in range(0, len(request_init["data_source"][field])): + del request_init["data_source"][field][i][subfield] else: - del request_init["backup_vault"][field][subfield] + del request_init["data_source"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20454,14 +27044,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_vault(request) + response = client.update_data_source(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_vault_rest_interceptors(null_interceptor): +def test_update_data_source_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -20475,17 +27065,17 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_backup_vault" + transports.BackupDRRestInterceptor, "post_update_data_source" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_backup_vault_with_metadata" + transports.BackupDRRestInterceptor, "post_update_data_source_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_backup_vault" + transports.BackupDRRestInterceptor, "pre_update_data_source" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.CreateBackupVaultRequest.pb( - backupvault.CreateBackupVaultRequest() + pb_message = backupvault.UpdateDataSourceRequest.pb( + backupvault.UpdateDataSourceRequest() ) transcode.return_value = { "method": "post", @@ -20500,7 +27090,7 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupvault.CreateBackupVaultRequest() + request = backupvault.UpdateDataSourceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20509,139 +27099,7 @@ def test_create_backup_vault_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = 
operations_pb2.Operation(), metadata - client.create_backup_vault( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_list_backup_vaults_rest_bad_request( - request_type=backupvault.ListBackupVaultsRequest, -): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_vaults(request) - - -@pytest.mark.parametrize( - "request_type", - [ - backupvault.ListBackupVaultsRequest, - dict, - ], -) -def test_list_backup_vaults_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = backupvault.ListBackupVaultsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListBackupVaultsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_vaults(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBackupVaultsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_vaults_rest_interceptors(null_interceptor): - transport = transports.BackupDRRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), - ) - client = BackupDRClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backup_vaults" - ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backup_vaults_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_backup_vaults" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = backupvault.ListBackupVaultsRequest.pb( - backupvault.ListBackupVaultsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.ListBackupVaultsResponse.to_json( - backupvault.ListBackupVaultsResponse() - ) - req.return_value.content = return_value - - request = backupvault.ListBackupVaultsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = backupvault.ListBackupVaultsResponse() - post_with_metadata.return_value = ( - backupvault.ListBackupVaultsResponse(), - metadata, - ) - - client.list_backup_vaults( + client.update_data_source( request, metadata=[ ("key", "val"), @@ -20654,14 +27112,14 @@ def test_list_backup_vaults_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_fetch_usable_backup_vaults_rest_bad_request( - request_type=backupvault.FetchUsableBackupVaultsRequest, -): +def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsRequest): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -20676,29 +27134,31 @@ def test_fetch_usable_backup_vaults_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.fetch_usable_backup_vaults(request) + client.list_backups(request) @pytest.mark.parametrize( "request_type", [ - backupvault.FetchUsableBackupVaultsRequest, + backupvault.ListBackupsRequest, dict, ], ) -def test_fetch_usable_backup_vaults_rest_call_success(request_type): +def test_list_backups_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupvault.FetchUsableBackupVaultsResponse( + return_value = backupvault.ListBackupsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -20708,21 +27168,21 @@ def test_fetch_usable_backup_vaults_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.FetchUsableBackupVaultsResponse.pb(return_value) + return_value = backupvault.ListBackupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.fetch_usable_backup_vaults(request) + response = client.list_backups(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.FetchUsableBackupVaultsPager) + assert isinstance(response, pagers.ListBackupsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): +def test_list_backups_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -20734,19 +27194,16 @@ def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_fetch_usable_backup_vaults" + transports.BackupDRRestInterceptor, "post_list_backups" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, - "post_fetch_usable_backup_vaults_with_metadata", + transports.BackupDRRestInterceptor, 
"post_list_backups_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_fetch_usable_backup_vaults" + transports.BackupDRRestInterceptor, "pre_list_backups" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.FetchUsableBackupVaultsRequest.pb( - backupvault.FetchUsableBackupVaultsRequest() - ) + pb_message = backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20757,24 +27214,21 @@ def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.FetchUsableBackupVaultsResponse.to_json( - backupvault.FetchUsableBackupVaultsResponse() + return_value = backupvault.ListBackupsResponse.to_json( + backupvault.ListBackupsResponse() ) req.return_value.content = return_value - request = backupvault.FetchUsableBackupVaultsRequest() + request = backupvault.ListBackupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupvault.FetchUsableBackupVaultsResponse() - post_with_metadata.return_value = ( - backupvault.FetchUsableBackupVaultsResponse(), - metadata, - ) + post.return_value = backupvault.ListBackupsResponse() + post_with_metadata.return_value = backupvault.ListBackupsResponse(), metadata - client.fetch_usable_backup_vaults( + client.list_backups( request, metadata=[ ("key", "val"), @@ -20787,14 +27241,14 @@ def test_fetch_usable_backup_vaults_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_backup_vault_rest_bad_request( - request_type=backupvault.GetBackupVaultRequest, -): +def test_get_backup_rest_bad_request(request_type=backupvault.GetBackupRequest): client = BackupDRClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20809,39 +27263,39 @@ def test_get_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_vault(request) + client.get_backup(request) @pytest.mark.parametrize( "request_type", [ - backupvault.GetBackupVaultRequest, + backupvault.GetBackupRequest, dict, ], ) -def test_get_backup_vault_rest_call_success(request_type): +def test_get_backup_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupvault.BackupVault( + return_value = backupvault.Backup( name="name_value", description="description_value", - deletable=True, etag="etag_value", - state=backupvault.BackupVault.State.CREATING, - backup_count=1278, - service_account="service_account_value", - total_stored_bytes=1946, - uid="uid_value", - access_restriction=backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT, + state=backupvault.Backup.State.CREATING, + backup_type=backupvault.Backup.BackupType.SCHEDULED, + resource_size_bytes=2056, + satisfies_pzs=True, + satisfies_pzi=True, ) # Wrap the value into a proper Response obj @@ -20849,32 +27303,27 @@ def test_get_backup_vault_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.BackupVault.pb(return_value) + return_value = backupvault.Backup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_vault(request) + response = client.get_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.BackupVault) + assert isinstance(response, backupvault.Backup) assert response.name == "name_value" assert response.description == "description_value" - assert response.deletable is True assert response.etag == "etag_value" - assert response.state == backupvault.BackupVault.State.CREATING - assert response.backup_count == 1278 - assert response.service_account == "service_account_value" - assert response.total_stored_bytes == 1946 - assert response.uid == "uid_value" - assert ( - response.access_restriction - == backupvault.BackupVault.AccessRestriction.WITHIN_PROJECT - ) + assert response.state == backupvault.Backup.State.CREATING + assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED + assert response.resource_size_bytes == 2056 + assert response.satisfies_pzs is True + assert response.satisfies_pzi is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_vault_rest_interceptors(null_interceptor): +def test_get_backup_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -20886,18 +27335,16 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_backup_vault" + transports.BackupDRRestInterceptor, "post_get_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_backup_vault_with_metadata" + transports.BackupDRRestInterceptor, "post_get_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_backup_vault" + transports.BackupDRRestInterceptor, "pre_get_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
backupvault.GetBackupVaultRequest.pb( - backupvault.GetBackupVaultRequest() - ) + pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20908,19 +27355,19 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.BackupVault.to_json(backupvault.BackupVault()) + return_value = backupvault.Backup.to_json(backupvault.Backup()) req.return_value.content = return_value - request = backupvault.GetBackupVaultRequest() + request = backupvault.GetBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupvault.BackupVault() - post_with_metadata.return_value = backupvault.BackupVault(), metadata + post.return_value = backupvault.Backup() + post_with_metadata.return_value = backupvault.Backup(), metadata - client.get_backup_vault( + client.get_backup( request, metadata=[ ("key", "val"), @@ -20933,16 +27380,14 @@ def test_get_backup_vault_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_backup_vault_rest_bad_request( - request_type=backupvault.UpdateBackupVaultRequest, -): +def test_update_backup_rest_bad_request(request_type=backupvault.UpdateBackupRequest): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup_vault": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" } } request = request_type(**request_init) @@ -20959,51 +27404,196 @@ def test_update_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = 
response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup_vault(request) + client.update_backup(request) @pytest.mark.parametrize( "request_type", [ - backupvault.UpdateBackupVaultRequest, + backupvault.UpdateBackupRequest, dict, ], ) -def test_update_backup_vault_rest_call_success(request_type): +def test_update_backup_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "backup_vault": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3" + "backup": { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" } } - request_init["backup_vault"] = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3", + request_init["backup"] = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5", "description": "description_value", - "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "backup_minimum_enforced_retention_duration": {"seconds": 751, "nanos": 543}, - "deletable": True, + "labels": {}, + "enforced_retention_end_time": {}, + "expire_time": {}, + "consistency_time": {}, "etag": "etag_value", "state": 1, - "effective_time": {}, - "backup_count": 1278, - "service_account": "service_account_value", - "total_stored_bytes": 1946, - "uid": "uid_value", - "annotations": {}, - "access_restriction": 1, + "service_locks": [ + { + "lock_until_time": {}, + "backup_appliance_lock_info": { + "backup_appliance_id": 1966, + "backup_appliance_name": "backup_appliance_name_value", + "lock_reason": "lock_reason_value", + "job_name": "job_name_value", + "backup_image": "backup_image_value", + "sla_id": 620, + }, + "service_lock_info": {"operation": "operation_value"}, + } + ], + "backup_appliance_locks": {}, + 
"compute_instance_backup_properties": { + "description": "description_value", + "tags": {"items": ["items_value1", "items_value2"]}, + "machine_type": "machine_type_value", + "can_ip_forward": True, + "network_interface": [ + { + "network": "network_value", + "subnetwork": "subnetwork_value", + "ip_address": "ip_address_value", + "ipv6_address": "ipv6_address_value", + "internal_ipv6_prefix_length": 2831, + "name": "name_value", + "access_configs": [ + { + "type_": 1, + "name": "name_value", + "external_ip": "external_ip_value", + "external_ipv6": "external_ipv6_value", + "external_ipv6_prefix_length": 2837, + "set_public_ptr": True, + "public_ptr_domain_name": "public_ptr_domain_name_value", + "network_tier": 1, + } + ], + "ipv6_access_configs": {}, + "alias_ip_ranges": [ + { + "ip_cidr_range": "ip_cidr_range_value", + "subnetwork_range_name": "subnetwork_range_name_value", + } + ], + "stack_type": 1, + "ipv6_access_type": 1, + "queue_count": 1197, + "nic_type": 1, + "network_attachment": "network_attachment_value", + } + ], + "disk": [ + { + "initialize_params": { + "disk_name": "disk_name_value", + "replica_zones": [ + "replica_zones_value1", + "replica_zones_value2", + ], + }, + "device_name": "device_name_value", + "kind": "kind_value", + "disk_type_deprecated": 1, + "mode": 1, + "source": "source_value", + "index": 536, + "boot": True, + "auto_delete": True, + "license_": ["license__value1", "license__value2"], + "disk_interface": 1, + "guest_os_feature": [{"type_": 1}], + "disk_encryption_key": { + "raw_key": "raw_key_value", + "rsa_encrypted_key": "rsa_encrypted_key_value", + "kms_key_name": "kms_key_name_value", + "kms_key_service_account": "kms_key_service_account_value", + }, + "disk_size_gb": 1261, + "saved_state": 1, + "disk_type": "disk_type_value", + "type_": 1, + } + ], + "metadata": {"items": [{"key": "key_value", "value": "value_value"}]}, + "service_account": [ + {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} + ], + 
"scheduling": { + "on_host_maintenance": 1, + "automatic_restart": True, + "preemptible": True, + "node_affinities": [ + { + "key": "key_value", + "operator": 1, + "values": ["values_value1", "values_value2"], + } + ], + "min_node_cpus": 1379, + "provisioning_model": 1, + "instance_termination_action": 1, + "local_ssd_recovery_timeout": {"seconds": 751, "nanos": 543}, + }, + "guest_accelerator": [ + { + "accelerator_type": "accelerator_type_value", + "accelerator_count": 1805, + } + ], + "min_cpu_platform": "min_cpu_platform_value", + "key_revocation_action_type": 1, + "source_instance": "source_instance_value", + "labels": {}, + }, + "cloud_sql_instance_backup_properties": { + "database_installed_version": "database_installed_version_value", + "final_backup": True, + "source_instance": "source_instance_value", + "instance_tier": "instance_tier_value", + }, + "backup_appliance_backup_properties": { + "generation_id": 1368, + "finalize_time": {}, + "recovery_range_start_time": {}, + "recovery_range_end_time": {}, + }, + "disk_backup_properties": { + "description": "description_value", + "licenses": ["licenses_value1", "licenses_value2"], + "guest_os_feature": {}, + "architecture": 1, + "type_": "type__value", + "size_gb": 739, + "region": "region_value", + "zone": "zone_value", + "replica_zones": ["replica_zones_value1", "replica_zones_value2"], + "source_disk": "source_disk_value", + }, + "backup_type": 1, + "gcp_backup_plan_info": { + "backup_plan": "backup_plan_value", + "backup_plan_rule_id": "backup_plan_rule_id_value", + "backup_plan_revision_name": "backup_plan_revision_name_value", + "backup_plan_revision_id": "backup_plan_revision_id_value", + }, + "resource_size_bytes": 2056, + "satisfies_pzs": True, + "satisfies_pzi": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = backupvault.UpdateBackupVaultRequest.meta.fields["backup_vault"] + test_field = backupvault.UpdateBackupRequest.meta.fields["backup"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21031,7 +27621,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_vault"].items(): # pragma: NO COVER + for field, value in request_init["backup"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -21061,10 +27651,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["backup_vault"][field])): - del request_init["backup_vault"][field][i][subfield] + for i in range(0, len(request_init["backup"][field])): + del request_init["backup"][field][i][subfield] else: - del request_init["backup_vault"][field][subfield] + del request_init["backup"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21079,14 +27669,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup_vault(request) + response = client.update_backup(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_vault_rest_interceptors(null_interceptor): +def test_update_backup_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -21100,17 +27690,17 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_backup_vault" + transports.BackupDRRestInterceptor, "post_update_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_backup_vault_with_metadata" + transports.BackupDRRestInterceptor, "post_update_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_update_backup_vault" + transports.BackupDRRestInterceptor, "pre_update_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.UpdateBackupVaultRequest.pb( - backupvault.UpdateBackupVaultRequest() + pb_message = backupvault.UpdateBackupRequest.pb( + backupvault.UpdateBackupRequest() ) transcode.return_value = { "method": "post", @@ -21125,7 +27715,7 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupvault.UpdateBackupVaultRequest() + request = backupvault.UpdateBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21134,7 +27724,7 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - 
client.update_backup_vault( + client.update_backup( request, metadata=[ ("key", "val"), @@ -21147,14 +27737,14 @@ def test_update_backup_vault_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_backup_vault_rest_bad_request( - request_type=backupvault.DeleteBackupVaultRequest, -): +def test_delete_backup_rest_bad_request(request_type=backupvault.DeleteBackupRequest): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21169,23 +27759,25 @@ def test_delete_backup_vault_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_vault(request) + client.delete_backup(request) @pytest.mark.parametrize( "request_type", [ - backupvault.DeleteBackupVaultRequest, + backupvault.DeleteBackupRequest, dict, ], ) -def test_delete_backup_vault_rest_call_success(request_type): +def test_delete_backup_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -21200,14 +27792,14 @@ def test_delete_backup_vault_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_vault(request) + response = client.delete_backup(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_vault_rest_interceptors(null_interceptor): +def test_delete_backup_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -21221,17 +27813,17 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_backup_vault" + transports.BackupDRRestInterceptor, "post_delete_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_backup_vault_with_metadata" + transports.BackupDRRestInterceptor, "post_delete_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_backup_vault" + transports.BackupDRRestInterceptor, "pre_delete_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.DeleteBackupVaultRequest.pb( - backupvault.DeleteBackupVaultRequest() + pb_message = backupvault.DeleteBackupRequest.pb( + backupvault.DeleteBackupRequest() ) transcode.return_value = { "method": "post", @@ -21246,7 +27838,7 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) 
req.return_value.content = return_value - request = backupvault.DeleteBackupVaultRequest() + request = backupvault.DeleteBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21255,7 +27847,7 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_backup_vault( + client.delete_backup( request, metadata=[ ("key", "val"), @@ -21268,14 +27860,14 @@ def test_delete_backup_vault_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_data_sources_rest_bad_request( - request_type=backupvault.ListDataSourcesRequest, -): +def test_restore_backup_rest_bad_request(request_type=backupvault.RestoreBackupRequest): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -21290,53 +27882,47 @@ def test_list_data_sources_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_data_sources(request) + client.restore_backup(request) @pytest.mark.parametrize( "request_type", [ - backupvault.ListDataSourcesRequest, + backupvault.RestoreBackupRequest, dict, ], ) -def test_list_data_sources_rest_call_success(request_type): +def test_restore_backup_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/backupVaults/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.ListDataSourcesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.ListDataSourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_data_sources(request) + response = client.restore_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDataSourcesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_data_sources_rest_interceptors(null_interceptor): +def test_restore_backup_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -21348,17 +27934,19 @@ def test_list_data_sources_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_data_sources" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_restore_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_data_sources_with_metadata" + transports.BackupDRRestInterceptor, "post_restore_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_data_sources" + transports.BackupDRRestInterceptor, "pre_restore_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.ListDataSourcesRequest.pb( - backupvault.ListDataSourcesRequest() + pb_message = backupvault.RestoreBackupRequest.pb( + backupvault.RestoreBackupRequest() ) transcode.return_value = { "method": "post", @@ -21370,24 +27958,19 @@ def test_list_data_sources_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.ListDataSourcesResponse.to_json( - backupvault.ListDataSourcesResponse() - ) + 
return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupvault.ListDataSourcesRequest() + request = backupvault.RestoreBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupvault.ListDataSourcesResponse() - post_with_metadata.return_value = ( - backupvault.ListDataSourcesResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_data_sources( + client.restore_backup( request, metadata=[ ("key", "val"), @@ -21400,16 +27983,14 @@ def test_list_data_sources_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_data_source_rest_bad_request( - request_type=backupvault.GetDataSourceRequest, +def test_create_backup_plan_rest_bad_request( + request_type=backupplan.CreateBackupPlanRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -21424,63 +28005,150 @@ def test_get_data_source_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_data_source(request) + client.create_backup_plan(request) @pytest.mark.parametrize( "request_type", [ - backupvault.GetDataSourceRequest, + backupplan.CreateBackupPlanRequest, dict, ], ) -def test_get_data_source_rest_call_success(request_type): +def test_create_backup_plan_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["backup_plan"] = { + "name": "name_value", + "description": "description_value", + "labels": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "backup_rules": [ + { + "rule_id": "rule_id_value", + "backup_retention_days": 2237, + "standard_schedule": { + "recurrence_type": 1, + "hourly_frequency": 1748, + "days_of_week": [1], + "days_of_month": [1387, 1388], + "week_day_of_month": {"week_of_month": 1, "day_of_week": 1}, + "months": [1], + "backup_window": { + "start_hour_of_day": 1820, + "end_hour_of_day": 1573, + }, + "time_zone": "time_zone_value", + }, + } + ], + "state": 1, + "resource_type": "resource_type_value", + "etag": "etag_value", + "backup_vault": "backup_vault_value", + "backup_vault_service_account": "backup_vault_service_account_value", + "log_retention_days": 1929, + "supported_resource_types": [ + "supported_resource_types_value1", + "supported_resource_types_value2", + ], + "revision_id": "revision_id_value", + "revision_name": "revision_name_value", } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplan.CreateBackupPlanRequest.meta.fields["backup_plan"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["backup_plan"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan"][field])): + del request_init["backup_plan"][field][i][subfield] + else: + del request_init["backup_plan"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.DataSource( - name="name_value", - state=backupvault.DataSource.State.CREATING, - backup_count=1278, - etag="etag_value", - total_stored_bytes=1946, - config_state=backupvault.BackupConfigState.ACTIVE, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupvault.DataSource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_data_source(request) + response = client.create_backup_plan(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.DataSource) - assert response.name == "name_value" - assert response.state == backupvault.DataSource.State.CREATING - assert response.backup_count == 1278 - assert response.etag == "etag_value" - assert response.total_stored_bytes == 1946 - assert response.config_state == backupvault.BackupConfigState.ACTIVE + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_data_source_rest_interceptors(null_interceptor): +def test_create_backup_plan_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -21492,17 +28160,19 @@ def test_get_data_source_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_data_source" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_create_backup_plan" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_data_source_with_metadata" + transports.BackupDRRestInterceptor, "post_create_backup_plan_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_data_source" + transports.BackupDRRestInterceptor, "pre_create_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.GetDataSourceRequest.pb( - backupvault.GetDataSourceRequest() + pb_message = backupplan.CreateBackupPlanRequest.pb( + backupplan.CreateBackupPlanRequest() ) transcode.return_value = { "method": "post", @@ -21514,19 +28184,19 @@ def test_get_data_source_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.DataSource.to_json(backupvault.DataSource()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupvault.GetDataSourceRequest() + request = backupplan.CreateBackupPlanRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupvault.DataSource() - post_with_metadata.return_value = backupvault.DataSource(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_data_source( + client.create_backup_plan( request, metadata=[ ("key", "val"), @@ -21539,16 +28209,16 @@ def test_get_data_source_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_data_source_rest_bad_request( - request_type=backupvault.UpdateDataSourceRequest, +def test_update_backup_plan_rest_bad_request( + request_type=backupplan.UpdateBackupPlanRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "data_source": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + "backup_plan": { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" } } request = request_type(**request_init) @@ -21565,97 +28235,71 @@ def test_update_data_source_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_data_source(request) + client.update_backup_plan(request) @pytest.mark.parametrize( "request_type", [ - backupvault.UpdateDataSourceRequest, + backupplan.UpdateBackupPlanRequest, dict, ], ) -def test_update_data_source_rest_call_success(request_type): +def 
test_update_backup_plan_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "data_source": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" + "backup_plan": { + "name": "projects/sample1/locations/sample2/backupPlans/sample3" } } - request_init["data_source"] = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4", - "state": 1, + request_init["backup_plan"] = { + "name": "projects/sample1/locations/sample2/backupPlans/sample3", + "description": "description_value", "labels": {}, "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "backup_count": 1278, + "backup_rules": [ + { + "rule_id": "rule_id_value", + "backup_retention_days": 2237, + "standard_schedule": { + "recurrence_type": 1, + "hourly_frequency": 1748, + "days_of_week": [1], + "days_of_month": [1387, 1388], + "week_day_of_month": {"week_of_month": 1, "day_of_week": 1}, + "months": [1], + "backup_window": { + "start_hour_of_day": 1820, + "end_hour_of_day": 1573, + }, + "time_zone": "time_zone_value", + }, + } + ], + "state": 1, + "resource_type": "resource_type_value", "etag": "etag_value", - "total_stored_bytes": 1946, - "config_state": 1, - "backup_config_info": { - "last_backup_state": 1, - "last_successful_backup_consistency_time": {}, - "last_backup_error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "gcp_backup_config": { - "backup_plan": "backup_plan_value", - "backup_plan_description": "backup_plan_description_value", - "backup_plan_association": "backup_plan_association_value", - "backup_plan_rules": [ - "backup_plan_rules_value1", - "backup_plan_rules_value2", - ], - }, - "backup_appliance_backup_config": { - 
"backup_appliance_name": "backup_appliance_name_value", - "backup_appliance_id": 1966, - "sla_id": 620, - "application_name": "application_name_value", - "host_name": "host_name_value", - "slt_name": "slt_name_value", - "slp_name": "slp_name_value", - }, - }, - "data_source_gcp_resource": { - "gcp_resourcename": "gcp_resourcename_value", - "location": "location_value", - "type_": "type__value", - "compute_instance_datasource_properties": { - "name": "name_value", - "description": "description_value", - "machine_type": "machine_type_value", - "total_disk_count": 1718, - "total_disk_size_gb": 1904, - }, - }, - "data_source_backup_appliance_application": { - "application_name": "application_name_value", - "backup_appliance": "backup_appliance_value", - "appliance_id": 1241, - "type_": "type__value", - "application_id": 1472, - "hostname": "hostname_value", - "host_id": 746, - }, + "backup_vault": "backup_vault_value", + "backup_vault_service_account": "backup_vault_service_account_value", + "log_retention_days": 1929, + "supported_resource_types": [ + "supported_resource_types_value1", + "supported_resource_types_value2", + ], + "revision_id": "revision_id_value", + "revision_name": "revision_name_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = backupvault.UpdateDataSourceRequest.meta.fields["data_source"] + test_field = backupplan.UpdateBackupPlanRequest.meta.fields["backup_plan"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -21683,7 +28327,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["data_source"].items(): # pragma: NO COVER + for field, value in request_init["backup_plan"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -21713,10 +28357,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["data_source"][field])): - del request_init["data_source"][field][i][subfield] + for i in range(0, len(request_init["backup_plan"][field])): + del request_init["backup_plan"][field][i][subfield] else: - del request_init["data_source"][field][subfield] + del request_init["backup_plan"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21731,14 +28375,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_data_source(request) + response = client.update_backup_plan(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_data_source_rest_interceptors(null_interceptor): +def test_update_backup_plan_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -21752,17 +28396,17 @@ def test_update_data_source_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_data_source" + transports.BackupDRRestInterceptor, "post_update_backup_plan" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_data_source_with_metadata" + transports.BackupDRRestInterceptor, "post_update_backup_plan_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_update_data_source" + transports.BackupDRRestInterceptor, "pre_update_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.UpdateDataSourceRequest.pb( - backupvault.UpdateDataSourceRequest() + pb_message = backupplan.UpdateBackupPlanRequest.pb( + backupplan.UpdateBackupPlanRequest() ) transcode.return_value = { "method": "post", @@ -21777,7 +28421,7 @@ def test_update_data_source_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupvault.UpdateDataSourceRequest() + request = backupplan.UpdateBackupPlanRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21786,7 +28430,7 @@ def test_update_data_source_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), 
metadata - client.update_data_source( + client.update_backup_plan( request, metadata=[ ("key", "val"), @@ -21799,14 +28443,12 @@ def test_update_data_source_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsRequest): +def test_get_backup_plan_rest_bad_request(request_type=backupplan.GetBackupPlanRequest): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21821,33 +28463,40 @@ def test_list_backups_rest_bad_request(request_type=backupvault.ListBackupsReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backups(request) + client.get_backup_plan(request) @pytest.mark.parametrize( "request_type", [ - backupvault.ListBackupsRequest, + backupplan.GetBackupPlanRequest, dict, ], ) -def test_list_backups_rest_call_success(request_type): +def test_get_backup_plan_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupvault.ListBackupsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = backupplan.BackupPlan( + name="name_value", + description="description_value", + state=backupplan.BackupPlan.State.CREATING, + resource_type="resource_type_value", + etag="etag_value", + backup_vault="backup_vault_value", + backup_vault_service_account="backup_vault_service_account_value", + log_retention_days=1929, + supported_resource_types=["supported_resource_types_value"], + revision_id="revision_id_value", + revision_name="revision_name_value", ) # Wrap the value into a proper Response obj @@ -21855,21 +28504,30 @@ def test_list_backups_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.ListBackupsResponse.pb(return_value) + return_value = backupplan.BackupPlan.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backups(request) + response = client.get_backup_plan(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplan.BackupPlan) + assert response.name == "name_value" + assert response.description == "description_value" + assert response.state == backupplan.BackupPlan.State.CREATING + assert response.resource_type == "resource_type_value" + assert response.etag == "etag_value" + assert response.backup_vault == "backup_vault_value" + assert response.backup_vault_service_account == "backup_vault_service_account_value" + assert response.log_retention_days == 1929 + assert response.supported_resource_types == ["supported_resource_types_value"] + assert response.revision_id == "revision_id_value" + assert response.revision_name == "revision_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backups_rest_interceptors(null_interceptor): +def test_get_backup_plan_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -21881,16 +28539,18 @@ def test_list_backups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backups" + transports.BackupDRRestInterceptor, "post_get_backup_plan" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backups_with_metadata" + transports.BackupDRRestInterceptor, "post_get_backup_plan_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_backups" + transports.BackupDRRestInterceptor, "pre_get_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
backupvault.ListBackupsRequest.pb(backupvault.ListBackupsRequest()) + pb_message = backupplan.GetBackupPlanRequest.pb( + backupplan.GetBackupPlanRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21901,21 +28561,19 @@ def test_list_backups_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.ListBackupsResponse.to_json( - backupvault.ListBackupsResponse() - ) + return_value = backupplan.BackupPlan.to_json(backupplan.BackupPlan()) req.return_value.content = return_value - request = backupvault.ListBackupsRequest() + request = backupplan.GetBackupPlanRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupvault.ListBackupsResponse() - post_with_metadata.return_value = backupvault.ListBackupsResponse(), metadata + post.return_value = backupplan.BackupPlan() + post_with_metadata.return_value = backupplan.BackupPlan(), metadata - client.list_backups( + client.get_backup_plan( request, metadata=[ ("key", "val"), @@ -21928,14 +28586,14 @@ def test_list_backups_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_backup_rest_bad_request(request_type=backupvault.GetBackupRequest): +def test_list_backup_plans_rest_bad_request( + request_type=backupplan.ListBackupPlansRequest, +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -21950,37 +28608,31 @@ def test_get_backup_rest_bad_request(request_type=backupvault.GetBackupRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup(request) + client.list_backup_plans(request) @pytest.mark.parametrize( "request_type", [ - backupvault.GetBackupRequest, + backupplan.ListBackupPlansRequest, dict, ], ) -def test_get_backup_rest_call_success(request_type): +def test_list_backup_plans_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupvault.Backup( - name="name_value", - description="description_value", - etag="etag_value", - state=backupvault.Backup.State.CREATING, - backup_type=backupvault.Backup.BackupType.SCHEDULED, - resource_size_bytes=2056, + return_value = backupplan.ListBackupPlansResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -21988,25 +28640,21 @@ def test_get_backup_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupvault.Backup.pb(return_value) + return_value = backupplan.ListBackupPlansResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup(request) + response = client.list_backup_plans(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupvault.Backup) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.etag == "etag_value" - assert response.state == backupvault.Backup.State.CREATING - assert response.backup_type == backupvault.Backup.BackupType.SCHEDULED - assert response.resource_size_bytes == 2056 + assert isinstance(response, pagers.ListBackupPlansPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_rest_interceptors(null_interceptor): +def test_list_backup_plans_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -22018,16 +28666,18 @@ def test_get_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_backup" + transports.BackupDRRestInterceptor, "post_list_backup_plans" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_backup_with_metadata" + transports.BackupDRRestInterceptor, "post_list_backup_plans_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_backup" + transports.BackupDRRestInterceptor, "pre_list_backup_plans" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.GetBackupRequest.pb(backupvault.GetBackupRequest()) + pb_message = backupplan.ListBackupPlansRequest.pb( + backupplan.ListBackupPlansRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22038,19 +28688,21 @@ def test_get_backup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() 
req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupvault.Backup.to_json(backupvault.Backup()) + return_value = backupplan.ListBackupPlansResponse.to_json( + backupplan.ListBackupPlansResponse() + ) req.return_value.content = return_value - request = backupvault.GetBackupRequest() + request = backupplan.ListBackupPlansRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupvault.Backup() - post_with_metadata.return_value = backupvault.Backup(), metadata + post.return_value = backupplan.ListBackupPlansResponse() + post_with_metadata.return_value = backupplan.ListBackupPlansResponse(), metadata - client.get_backup( + client.list_backup_plans( request, metadata=[ ("key", "val"), @@ -22063,16 +28715,14 @@ def test_get_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_backup_rest_bad_request(request_type=backupvault.UpdateBackupRequest): +def test_delete_backup_plan_rest_bad_request( + request_type=backupplan.DeleteBackupPlanRequest, +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "backup": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } - } + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22087,235 +28737,23 @@ def test_update_backup_rest_bad_request(request_type=backupvault.UpdateBackupReq response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_backup(request) + client.delete_backup_plan(request) @pytest.mark.parametrize( "request_type", [ - backupvault.UpdateBackupRequest, + backupplan.DeleteBackupPlanRequest, dict, ], ) -def test_update_backup_rest_call_success(request_type): +def test_delete_backup_plan_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "backup": { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } - } - request_init["backup"] = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5", - "description": "description_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "enforced_retention_end_time": {}, - "expire_time": {}, - "consistency_time": {}, - "etag": "etag_value", - "state": 1, - "service_locks": [ - { - "lock_until_time": {}, - "backup_appliance_lock_info": { - "backup_appliance_id": 1966, - "backup_appliance_name": "backup_appliance_name_value", - "lock_reason": "lock_reason_value", - "job_name": "job_name_value", - "backup_image": "backup_image_value", - "sla_id": 620, - }, - "service_lock_info": {"operation": "operation_value"}, - } - ], - "backup_appliance_locks": {}, - "compute_instance_backup_properties": { - "description": "description_value", - "tags": {"items": ["items_value1", "items_value2"]}, - "machine_type": "machine_type_value", - "can_ip_forward": True, - "network_interface": [ - { - "network": "network_value", - "subnetwork": "subnetwork_value", - "ip_address": "ip_address_value", - "ipv6_address": 
"ipv6_address_value", - "internal_ipv6_prefix_length": 2831, - "name": "name_value", - "access_configs": [ - { - "type_": 1, - "name": "name_value", - "external_ip": "external_ip_value", - "external_ipv6": "external_ipv6_value", - "external_ipv6_prefix_length": 2837, - "set_public_ptr": True, - "public_ptr_domain_name": "public_ptr_domain_name_value", - "network_tier": 1, - } - ], - "ipv6_access_configs": {}, - "alias_ip_ranges": [ - { - "ip_cidr_range": "ip_cidr_range_value", - "subnetwork_range_name": "subnetwork_range_name_value", - } - ], - "stack_type": 1, - "ipv6_access_type": 1, - "queue_count": 1197, - "nic_type": 1, - "network_attachment": "network_attachment_value", - } - ], - "disk": [ - { - "initialize_params": { - "disk_name": "disk_name_value", - "replica_zones": [ - "replica_zones_value1", - "replica_zones_value2", - ], - }, - "device_name": "device_name_value", - "kind": "kind_value", - "disk_type_deprecated": 1, - "mode": 1, - "source": "source_value", - "index": 536, - "boot": True, - "auto_delete": True, - "license_": ["license__value1", "license__value2"], - "disk_interface": 1, - "guest_os_feature": [{"type_": 1}], - "disk_encryption_key": { - "raw_key": "raw_key_value", - "rsa_encrypted_key": "rsa_encrypted_key_value", - "kms_key_name": "kms_key_name_value", - "kms_key_service_account": "kms_key_service_account_value", - }, - "disk_size_gb": 1261, - "saved_state": 1, - "disk_type": "disk_type_value", - "type_": 1, - } - ], - "metadata": {"items": [{"key": "key_value", "value": "value_value"}]}, - "service_account": [ - {"email": "email_value", "scopes": ["scopes_value1", "scopes_value2"]} - ], - "scheduling": { - "on_host_maintenance": 1, - "automatic_restart": True, - "preemptible": True, - "node_affinities": [ - { - "key": "key_value", - "operator": 1, - "values": ["values_value1", "values_value2"], - } - ], - "min_node_cpus": 1379, - "provisioning_model": 1, - "instance_termination_action": 1, - "local_ssd_recovery_timeout": {"seconds": 
751, "nanos": 543}, - }, - "guest_accelerator": [ - { - "accelerator_type": "accelerator_type_value", - "accelerator_count": 1805, - } - ], - "min_cpu_platform": "min_cpu_platform_value", - "key_revocation_action_type": 1, - "source_instance": "source_instance_value", - "labels": {}, - }, - "backup_appliance_backup_properties": { - "generation_id": 1368, - "finalize_time": {}, - "recovery_range_start_time": {}, - "recovery_range_end_time": {}, - }, - "backup_type": 1, - "gcp_backup_plan_info": { - "backup_plan": "backup_plan_value", - "backup_plan_rule_id": "backup_plan_rule_id_value", - }, - "resource_size_bytes": 2056, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupvault.UpdateBackupRequest.meta.fields["backup"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup"][field])): - del request_init["backup"][field][i][subfield] - else: - del 
request_init["backup"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22330,14 +28768,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_backup(request) + response = client.delete_backup_plan(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_backup_rest_interceptors(null_interceptor): +def test_delete_backup_plan_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -22351,17 +28789,17 @@ def test_update_backup_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_backup" + transports.BackupDRRestInterceptor, "post_delete_backup_plan" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_update_backup_with_metadata" + transports.BackupDRRestInterceptor, "post_delete_backup_plan_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_update_backup" + transports.BackupDRRestInterceptor, "pre_delete_backup_plan" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.UpdateBackupRequest.pb( - backupvault.UpdateBackupRequest() + pb_message = backupplan.DeleteBackupPlanRequest.pb( + backupplan.DeleteBackupPlanRequest() ) transcode.return_value = { 
"method": "post", @@ -22376,7 +28814,7 @@ def test_update_backup_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupvault.UpdateBackupRequest() + request = backupplan.DeleteBackupPlanRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22385,7 +28823,7 @@ def test_update_backup_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_backup( + client.delete_backup_plan( request, metadata=[ ("key", "val"), @@ -22398,13 +28836,15 @@ def test_update_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_backup_rest_bad_request(request_type=backupvault.DeleteBackupRequest): +def test_get_backup_plan_revision_rest_bad_request( + request_type=backupplan.GetBackupPlanRevisionRequest, +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + "name": "projects/sample1/locations/sample2/backupPlans/sample3/revisions/sample4" } request = request_type(**request_init) @@ -22420,47 +28860,57 @@ def test_delete_backup_rest_bad_request(request_type=backupvault.DeleteBackupReq response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup(request) + client.get_backup_plan_revision(request) @pytest.mark.parametrize( "request_type", [ - backupvault.DeleteBackupRequest, + backupplan.GetBackupPlanRevisionRequest, dict, ], ) -def test_delete_backup_rest_call_success(request_type): +def test_get_backup_plan_revision_rest_call_success(request_type): client = BackupDRClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" + "name": "projects/sample1/locations/sample2/backupPlans/sample3/revisions/sample4" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupplan.BackupPlanRevision( + name="name_value", + revision_id="revision_id_value", + state=backupplan.BackupPlanRevision.State.CREATING, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.BackupPlanRevision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup(request) + response = client.get_backup_plan_revision(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, backupplan.BackupPlanRevision) + assert response.name == "name_value" + assert response.revision_id == "revision_id_value" + assert response.state == backupplan.BackupPlanRevision.State.CREATING @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_rest_interceptors(null_interceptor): +def test_get_backup_plan_revision_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -22472,19 +28922,18 @@ def test_delete_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_backup" + transports.BackupDRRestInterceptor, "post_get_backup_plan_revision" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_backup_with_metadata" + transports.BackupDRRestInterceptor, + "post_get_backup_plan_revision_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_backup" + transports.BackupDRRestInterceptor, "pre_get_backup_plan_revision" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.DeleteBackupRequest.pb( - backupvault.DeleteBackupRequest() + pb_message = backupplan.GetBackupPlanRevisionRequest.pb( + backupplan.GetBackupPlanRevisionRequest() ) transcode.return_value = { "method": "post", @@ -22496,19 +28945,21 @@ def test_delete_backup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
json_format.MessageToJson(operations_pb2.Operation()) + return_value = backupplan.BackupPlanRevision.to_json( + backupplan.BackupPlanRevision() + ) req.return_value.content = return_value - request = backupvault.DeleteBackupRequest() + request = backupplan.GetBackupPlanRevisionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backupplan.BackupPlanRevision() + post_with_metadata.return_value = backupplan.BackupPlanRevision(), metadata - client.delete_backup( + client.get_backup_plan_revision( request, metadata=[ ("key", "val"), @@ -22521,14 +28972,14 @@ def test_delete_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_restore_backup_rest_bad_request(request_type=backupvault.RestoreBackupRequest): +def test_list_backup_plan_revisions_rest_bad_request( + request_type=backupplan.ListBackupPlanRevisionsRequest, +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + request_init = {"parent": "projects/sample1/locations/sample2/backupPlans/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22543,47 +28994,53 @@ def test_restore_backup_rest_bad_request(request_type=backupvault.RestoreBackupR response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.restore_backup(request) + client.list_backup_plan_revisions(request) @pytest.mark.parametrize( "request_type", [ - backupvault.RestoreBackupRequest, + backupplan.ListBackupPlanRevisionsRequest, dict, ], ) -def test_restore_backup_rest_call_success(request_type): +def test_list_backup_plan_revisions_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupVaults/sample3/dataSources/sample4/backups/sample5" - } + request_init = {"parent": "projects/sample1/locations/sample2/backupPlans/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupplan.ListBackupPlanRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplan.ListBackupPlanRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.restore_backup(request) + response = client.list_backup_plan_revisions(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListBackupPlanRevisionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_restore_backup_rest_interceptors(null_interceptor): +def test_list_backup_plan_revisions_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -22595,19 +29052,18 @@ def test_restore_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_restore_backup" + transports.BackupDRRestInterceptor, "post_list_backup_plan_revisions" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_restore_backup_with_metadata" + transports.BackupDRRestInterceptor, + 
"post_list_backup_plan_revisions_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_restore_backup" + transports.BackupDRRestInterceptor, "pre_list_backup_plan_revisions" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupvault.RestoreBackupRequest.pb( - backupvault.RestoreBackupRequest() + pb_message = backupplan.ListBackupPlanRevisionsRequest.pb( + backupplan.ListBackupPlanRevisionsRequest() ) transcode.return_value = { "method": "post", @@ -22619,19 +29075,24 @@ def test_restore_backup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backupplan.ListBackupPlanRevisionsResponse.to_json( + backupplan.ListBackupPlanRevisionsResponse() + ) req.return_value.content = return_value - request = backupvault.RestoreBackupRequest() + request = backupplan.ListBackupPlanRevisionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backupplan.ListBackupPlanRevisionsResponse() + post_with_metadata.return_value = ( + backupplan.ListBackupPlanRevisionsResponse(), + metadata, + ) - client.restore_backup( + client.list_backup_plan_revisions( request, metadata=[ ("key", "val"), @@ -22644,8 +29105,8 @@ def test_restore_backup_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_backup_plan_rest_bad_request( - request_type=backupplan.CreateBackupPlanRequest, +def test_create_backup_plan_association_rest_bad_request( + request_type=backupplanassociation.CreateBackupPlanAssociationRequest, ): client = BackupDRClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -22666,60 +29127,63 @@ def test_create_backup_plan_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_plan(request) + client.create_backup_plan_association(request) @pytest.mark.parametrize( "request_type", [ - backupplan.CreateBackupPlanRequest, + backupplanassociation.CreateBackupPlanAssociationRequest, dict, ], ) -def test_create_backup_plan_rest_call_success(request_type): +def test_create_backup_plan_association_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["backup_plan"] = { + request_init["backup_plan_association"] = { "name": "name_value", - "description": "description_value", - "labels": {}, + "resource_type": "resource_type_value", + "resource": "resource_value", + "backup_plan": "backup_plan_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "backup_rules": [ + "state": 1, + "rules_config_info": [ { "rule_id": "rule_id_value", - "backup_retention_days": 2237, - "standard_schedule": { - "recurrence_type": 1, - "hourly_frequency": 1748, - "days_of_week": [1], - "days_of_month": [1387, 1388], - "week_day_of_month": {"week_of_month": 1, "day_of_week": 1}, - "months": [1], - "backup_window": { - "start_hour_of_day": 1820, - "end_hour_of_day": 1573, - }, - "time_zone": "time_zone_value", + "last_backup_state": 1, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], }, + "last_successful_backup_consistency_time": {}, } ], - "state": 1, - "resource_type": "resource_type_value", - "etag": 
"etag_value", - "backup_vault": "backup_vault_value", - "backup_vault_service_account": "backup_vault_service_account_value", + "data_source": "data_source_value", + "cloud_sql_instance_backup_plan_association_properties": { + "instance_create_time": {} + }, + "backup_plan_revision_id": "backup_plan_revision_id_value", + "backup_plan_revision_name": "backup_plan_revision_name_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = backupplan.CreateBackupPlanRequest.meta.fields["backup_plan"] + test_field = backupplanassociation.CreateBackupPlanAssociationRequest.meta.fields[ + "backup_plan_association" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -22747,7 +29211,9 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["backup_plan"].items(): # pragma: NO COVER + for field, value in request_init[ + "backup_plan_association" + ].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -22777,10 +29243,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["backup_plan"][field])): - del request_init["backup_plan"][field][i][subfield] + for i in range(0, len(request_init["backup_plan_association"][field])): + del request_init["backup_plan_association"][field][i][subfield] else: - del request_init["backup_plan"][field][subfield] + del request_init["backup_plan_association"][field][subfield] 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22795,14 +29261,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_plan(request) + response = client.create_backup_plan_association(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_plan_rest_interceptors(null_interceptor): +def test_create_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -22816,17 +29282,18 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_backup_plan" + transports.BackupDRRestInterceptor, "post_create_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_backup_plan_with_metadata" + transports.BackupDRRestInterceptor, + "post_create_backup_plan_association_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_backup_plan" + transports.BackupDRRestInterceptor, "pre_create_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplan.CreateBackupPlanRequest.pb( - backupplan.CreateBackupPlanRequest() + pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb( + backupplanassociation.CreateBackupPlanAssociationRequest() ) 
transcode.return_value = { "method": "post", @@ -22841,7 +29308,7 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupplan.CreateBackupPlanRequest() + request = backupplanassociation.CreateBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22850,7 +29317,7 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_backup_plan( + client.create_backup_plan_association( request, metadata=[ ("key", "val"), @@ -22863,12 +29330,18 @@ def test_create_backup_plan_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_backup_plan_rest_bad_request(request_type=backupplan.GetBackupPlanRequest): +def test_update_backup_plan_association_rest_bad_request( + request_type=backupplanassociation.UpdateBackupPlanAssociationRequest, +): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request_init = { + "backup_plan_association": { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22883,63 +29356,152 @@ def test_get_backup_plan_rest_bad_request(request_type=backupplan.GetBackupPlanR response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_plan(request) + client.update_backup_plan_association(request) @pytest.mark.parametrize( "request_type", [ - backupplan.GetBackupPlanRequest, + backupplanassociation.UpdateBackupPlanAssociationRequest, dict, ], ) -def test_get_backup_plan_rest_call_success(request_type): +def test_update_backup_plan_association_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request_init = { + "backup_plan_association": { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } + } + request_init["backup_plan_association"] = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3", + "resource_type": "resource_type_value", + "resource": "resource_value", + "backup_plan": "backup_plan_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "state": 1, + "rules_config_info": [ + { + "rule_id": "rule_id_value", + "last_backup_state": 1, + "last_backup_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "last_successful_backup_consistency_time": {}, + } + ], + "data_source": "data_source_value", + "cloud_sql_instance_backup_plan_association_properties": { + "instance_create_time": {} + }, + "backup_plan_revision_id": "backup_plan_revision_id_value", + "backup_plan_revision_name": "backup_plan_revision_name_value", + } + # The version of a generated dependency at test runtime may differ from the version 
used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = backupplanassociation.UpdateBackupPlanAssociationRequest.meta.fields[ + "backup_plan_association" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backup_plan_association" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields 
from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backup_plan_association"][field])): + del request_init["backup_plan_association"][field][i][subfield] + else: + del request_init["backup_plan_association"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupplan.BackupPlan( - name="name_value", - description="description_value", - state=backupplan.BackupPlan.State.CREATING, - resource_type="resource_type_value", - etag="etag_value", - backup_vault="backup_vault_value", - backup_vault_service_account="backup_vault_service_account_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplan.BackupPlan.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_plan(request) + response = client.update_backup_plan_association(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, backupplan.BackupPlan) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.state == backupplan.BackupPlan.State.CREATING - assert response.resource_type == "resource_type_value" - assert response.etag == "etag_value" - assert response.backup_vault == "backup_vault_value" - assert response.backup_vault_service_account == "backup_vault_service_account_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_plan_rest_interceptors(null_interceptor): +def test_update_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -22951,17 +29513,20 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_backup_plan" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_update_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_backup_plan_with_metadata" + transports.BackupDRRestInterceptor, + "post_update_backup_plan_association_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_backup_plan" + transports.BackupDRRestInterceptor, "pre_update_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplan.GetBackupPlanRequest.pb( - backupplan.GetBackupPlanRequest() + pb_message = backupplanassociation.UpdateBackupPlanAssociationRequest.pb( + backupplanassociation.UpdateBackupPlanAssociationRequest() ) 
transcode.return_value = { "method": "post", @@ -22973,19 +29538,19 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupplan.BackupPlan.to_json(backupplan.BackupPlan()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupplan.GetBackupPlanRequest() + request = backupplanassociation.UpdateBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupplan.BackupPlan() - post_with_metadata.return_value = backupplan.BackupPlan(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_backup_plan( + client.update_backup_plan_association( request, metadata=[ ("key", "val"), @@ -22998,14 +29563,16 @@ def test_get_backup_plan_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_backup_plans_rest_bad_request( - request_type=backupplan.ListBackupPlansRequest, +def test_get_backup_plan_association_rest_bad_request( + request_type=backupplanassociation.GetBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23020,31 +29587,39 @@ def test_list_backup_plans_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_plans(request) + client.get_backup_plan_association(request) @pytest.mark.parametrize( "request_type", [ - backupplan.ListBackupPlansRequest, + backupplanassociation.GetBackupPlanAssociationRequest, dict, ], ) -def test_list_backup_plans_rest_call_success(request_type): +def test_get_backup_plan_association_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupplan.ListBackupPlansResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = backupplanassociation.BackupPlanAssociation( + name="name_value", + resource_type="resource_type_value", + resource="resource_value", + backup_plan="backup_plan_value", + state=backupplanassociation.BackupPlanAssociation.State.CREATING, + data_source="data_source_value", + backup_plan_revision_id="backup_plan_revision_id_value", + backup_plan_revision_name="backup_plan_revision_name_value", ) # Wrap the value into a proper Response obj @@ -23052,21 +29627,27 @@ def test_list_backup_plans_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = backupplan.ListBackupPlansResponse.pb(return_value) + return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_plans(request) + response = client.get_backup_plan_association(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPlansPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, backupplanassociation.BackupPlanAssociation) + assert response.name == "name_value" + assert response.resource_type == "resource_type_value" + assert response.resource == "resource_value" + assert response.backup_plan == "backup_plan_value" + assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING + assert response.data_source == "data_source_value" + assert response.backup_plan_revision_id == "backup_plan_revision_id_value" + assert response.backup_plan_revision_name == "backup_plan_revision_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_plans_rest_interceptors(null_interceptor): +def test_get_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -23078,17 +29659,18 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backup_plans" + transports.BackupDRRestInterceptor, "post_get_backup_plan_association" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backup_plans_with_metadata" + transports.BackupDRRestInterceptor, + "post_get_backup_plan_association_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_backup_plans" + transports.BackupDRRestInterceptor, "pre_get_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplan.ListBackupPlansRequest.pb( - 
backupplan.ListBackupPlansRequest() + pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb( + backupplanassociation.GetBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -23100,21 +29682,24 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupplan.ListBackupPlansResponse.to_json( - backupplan.ListBackupPlansResponse() + return_value = backupplanassociation.BackupPlanAssociation.to_json( + backupplanassociation.BackupPlanAssociation() ) req.return_value.content = return_value - request = backupplan.ListBackupPlansRequest() + request = backupplanassociation.GetBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupplan.ListBackupPlansResponse() - post_with_metadata.return_value = backupplan.ListBackupPlansResponse(), metadata + post.return_value = backupplanassociation.BackupPlanAssociation() + post_with_metadata.return_value = ( + backupplanassociation.BackupPlanAssociation(), + metadata, + ) - client.list_backup_plans( + client.get_backup_plan_association( request, metadata=[ ("key", "val"), @@ -23127,14 +29712,14 @@ def test_list_backup_plans_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_backup_plan_rest_bad_request( - request_type=backupplan.DeleteBackupPlanRequest, +def test_list_backup_plan_associations_rest_bad_request( + request_type=backupplanassociation.ListBackupPlanAssociationsRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23149,45 +29734,55 @@ def test_delete_backup_plan_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_plan(request) + client.list_backup_plan_associations(request) @pytest.mark.parametrize( "request_type", [ - backupplan.DeleteBackupPlanRequest, + backupplanassociation.ListBackupPlanAssociationsRequest, dict, ], ) -def test_delete_backup_plan_rest_call_success(request_type): +def test_list_backup_plan_associations_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/backupPlans/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = backupplanassociation.ListBackupPlanAssociationsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_plan(request) + response = client.list_backup_plan_associations(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListBackupPlanAssociationsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_plan_rest_interceptors(null_interceptor): +def test_list_backup_plan_associations_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -23199,19 +29794,18 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_backup_plan" + transports.BackupDRRestInterceptor, "post_list_backup_plan_associations" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_backup_plan_with_metadata" + 
transports.BackupDRRestInterceptor, + "post_list_backup_plan_associations_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_backup_plan" + transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplan.DeleteBackupPlanRequest.pb( - backupplan.DeleteBackupPlanRequest() + pb_message = backupplanassociation.ListBackupPlanAssociationsRequest.pb( + backupplanassociation.ListBackupPlanAssociationsRequest() ) transcode.return_value = { "method": "post", @@ -23223,19 +29817,24 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backupplanassociation.ListBackupPlanAssociationsResponse.to_json( + backupplanassociation.ListBackupPlanAssociationsResponse() + ) req.return_value.content = return_value - request = backupplan.DeleteBackupPlanRequest() + request = backupplanassociation.ListBackupPlanAssociationsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() + post_with_metadata.return_value = ( + backupplanassociation.ListBackupPlanAssociationsResponse(), + metadata, + ) - client.delete_backup_plan( + client.list_backup_plan_associations( request, metadata=[ ("key", "val"), @@ -23248,8 +29847,8 @@ def test_delete_backup_plan_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_backup_plan_association_rest_bad_request( - 
request_type=backupplanassociation.CreateBackupPlanAssociationRequest, +def test_fetch_backup_plan_associations_for_resource_type_rest_bad_request( + request_type=backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -23264,149 +29863,67 @@ def test_create_backup_plan_association_rest_bad_request( ): # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_backup_plan_association(request) - - -@pytest.mark.parametrize( - "request_type", - [ - backupplanassociation.CreateBackupPlanAssociationRequest, - dict, - ], -) -def test_create_backup_plan_association_rest_call_success(request_type): - client = BackupDRClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["backup_plan_association"] = { - "name": "name_value", - "resource_type": "resource_type_value", - "resource": "resource_value", - "backup_plan": "backup_plan_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "state": 1, - "rules_config_info": [ - { - "rule_id": "rule_id_value", - "last_backup_state": 1, - "last_backup_error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "last_successful_backup_consistency_time": {}, - } - ], - "data_source": "data_source_value", - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = backupplanassociation.CreateBackupPlanAssociationRequest.meta.fields[ - "backup_plan_association" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.fetch_backup_plan_associations_for_resource_type(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "backup_plan_association" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if 
isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest, + dict, + ], +) +def test_fetch_backup_plan_associations_for_resource_type_rest_call_success( + request_type, +): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["backup_plan_association"][field])): - del request_init["backup_plan_association"][field][i][subfield] - else: - del request_init["backup_plan_association"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_backup_plan_association(request) + response = client.fetch_backup_plan_associations_for_resource_type(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.FetchBackupPlanAssociationsForResourceTypePager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_backup_plan_association_rest_interceptors(null_interceptor): +def test_fetch_backup_plan_associations_for_resource_type_rest_interceptors( + null_interceptor, +): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -23418,20 +29935,20 @@ def test_create_backup_plan_association_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_create_backup_plan_association" + transports.BackupDRRestInterceptor, + "post_fetch_backup_plan_associations_for_resource_type", ) as post, 
mock.patch.object( transports.BackupDRRestInterceptor, - "post_create_backup_plan_association_with_metadata", + "post_fetch_backup_plan_associations_for_resource_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_create_backup_plan_association" + transports.BackupDRRestInterceptor, + "pre_fetch_backup_plan_associations_for_resource_type", ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplanassociation.CreateBackupPlanAssociationRequest.pb( - backupplanassociation.CreateBackupPlanAssociationRequest() + pb_message = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest.pb( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() ) transcode.return_value = { "method": "post", @@ -23443,19 +29960,28 @@ def test_create_backup_plan_association_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse.to_json( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) req.return_value.content = return_value - request = backupplanassociation.CreateBackupPlanAssociationRequest() + request = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse() + ) + post_with_metadata.return_value = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeResponse(), + metadata, + ) - 
client.create_backup_plan_association( + client.fetch_backup_plan_associations_for_resource_type( request, metadata=[ ("key", "val"), @@ -23468,8 +29994,8 @@ def test_create_backup_plan_association_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_backup_plan_association_rest_bad_request( - request_type=backupplanassociation.GetBackupPlanAssociationRequest, +def test_delete_backup_plan_association_rest_bad_request( + request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -23492,17 +30018,17 @@ def test_get_backup_plan_association_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_backup_plan_association(request) + client.delete_backup_plan_association(request) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.GetBackupPlanAssociationRequest, + backupplanassociation.DeleteBackupPlanAssociationRequest, dict, ], ) -def test_get_backup_plan_association_rest_call_success(request_type): +def test_delete_backup_plan_association_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -23516,39 +30042,23 @@ def test_get_backup_plan_association_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = backupplanassociation.BackupPlanAssociation( - name="name_value", - resource_type="resource_type_value", - resource="resource_value", - backup_plan="backup_plan_value", - state=backupplanassociation.BackupPlanAssociation.State.CREATING, - data_source="data_source_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplanassociation.BackupPlanAssociation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_backup_plan_association(request) + response = client.delete_backup_plan_association(request) # Establish that the response is the type that we expect. - assert isinstance(response, backupplanassociation.BackupPlanAssociation) - assert response.name == "name_value" - assert response.resource_type == "resource_type_value" - assert response.resource == "resource_value" - assert response.backup_plan == "backup_plan_value" - assert response.state == backupplanassociation.BackupPlanAssociation.State.CREATING - assert response.data_source == "data_source_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_backup_plan_association_rest_interceptors(null_interceptor): +def test_delete_backup_plan_association_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -23560,18 +30070,20 @@ def test_get_backup_plan_association_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, 
"transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_get_backup_plan_association" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_delete_backup_plan_association" ) as post, mock.patch.object( transports.BackupDRRestInterceptor, - "post_get_backup_plan_association_with_metadata", + "post_delete_backup_plan_association_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_get_backup_plan_association" + transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplanassociation.GetBackupPlanAssociationRequest.pb( - backupplanassociation.GetBackupPlanAssociationRequest() + pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( + backupplanassociation.DeleteBackupPlanAssociationRequest() ) transcode.return_value = { "method": "post", @@ -23583,24 +30095,19 @@ def test_get_backup_plan_association_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = backupplanassociation.BackupPlanAssociation.to_json( - backupplanassociation.BackupPlanAssociation() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupplanassociation.GetBackupPlanAssociationRequest() + request = backupplanassociation.DeleteBackupPlanAssociationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupplanassociation.BackupPlanAssociation() - post_with_metadata.return_value = ( - backupplanassociation.BackupPlanAssociation(), - metadata, - ) + post.return_value = operations_pb2.Operation() + 
post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_backup_plan_association( + client.delete_backup_plan_association( request, metadata=[ ("key", "val"), @@ -23613,14 +30120,16 @@ def test_get_backup_plan_association_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_backup_plan_associations_rest_bad_request( - request_type=backupplanassociation.ListBackupPlanAssociationsRequest, +def test_trigger_backup_rest_bad_request( + request_type=backupplanassociation.TriggerBackupRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23635,55 +30144,47 @@ def test_list_backup_plan_associations_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_backup_plan_associations(request) + client.trigger_backup(request) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.ListBackupPlanAssociationsRequest, + backupplanassociation.TriggerBackupRequest, dict, ], ) -def test_list_backup_plan_associations_rest_call_success(request_type): +def test_trigger_backup_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = backupplanassociation.ListBackupPlanAssociationsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = backupplanassociation.ListBackupPlanAssociationsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_backup_plan_associations(request) + response = client.trigger_backup(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListBackupPlanAssociationsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_backup_plan_associations_rest_interceptors(null_interceptor): +def test_trigger_backup_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -23695,18 +30196,19 @@ def test_list_backup_plan_associations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.BackupDRRestInterceptor, "post_list_backup_plan_associations" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BackupDRRestInterceptor, "post_trigger_backup" ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, - "post_list_backup_plan_associations_with_metadata", + transports.BackupDRRestInterceptor, "post_trigger_backup_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_list_backup_plan_associations" + transports.BackupDRRestInterceptor, "pre_trigger_backup" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplanassociation.ListBackupPlanAssociationsRequest.pb( - backupplanassociation.ListBackupPlanAssociationsRequest() + pb_message = backupplanassociation.TriggerBackupRequest.pb( + backupplanassociation.TriggerBackupRequest() ) transcode.return_value = { "method": "post", @@ -23718,24 +30220,19 @@ def test_list_backup_plan_associations_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} - return_value = backupplanassociation.ListBackupPlanAssociationsResponse.to_json( - backupplanassociation.ListBackupPlanAssociationsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = backupplanassociation.ListBackupPlanAssociationsRequest() + request = backupplanassociation.TriggerBackupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = backupplanassociation.ListBackupPlanAssociationsResponse() - post_with_metadata.return_value = ( - backupplanassociation.ListBackupPlanAssociationsResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_backup_plan_associations( + client.trigger_backup( request, metadata=[ ("key", "val"), @@ -23748,15 +30245,15 @@ def test_list_backup_plan_associations_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_backup_plan_association_rest_bad_request( - request_type=backupplanassociation.DeleteBackupPlanAssociationRequest, +def test_get_data_source_reference_rest_bad_request( + request_type=datasourcereference.GetDataSourceReferenceRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + "name": "projects/sample1/locations/sample2/dataSourceReferences/sample3" } request = request_type(**request_init) @@ -23772,47 +30269,61 @@ def test_delete_backup_plan_association_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_backup_plan_association(request) + 
client.get_data_source_reference(request) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.DeleteBackupPlanAssociationRequest, + datasourcereference.GetDataSourceReferenceRequest, dict, ], ) -def test_delete_backup_plan_association_rest_call_success(request_type): +def test_get_data_source_reference_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" + "name": "projects/sample1/locations/sample2/dataSourceReferences/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = datasourcereference.DataSourceReference( + name="name_value", + data_source="data_source_value", + data_source_backup_config_state=backupvault.BackupConfigState.ACTIVE, + data_source_backup_count=2535, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = datasourcereference.DataSourceReference.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_backup_plan_association(request) + response = client.get_data_source_reference(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, datasourcereference.DataSourceReference) + assert response.name == "name_value" + assert response.data_source == "data_source_value" + assert ( + response.data_source_backup_config_state == backupvault.BackupConfigState.ACTIVE + ) + assert response.data_source_backup_count == 2535 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_backup_plan_association_rest_interceptors(null_interceptor): +def test_get_data_source_reference_rest_interceptors(null_interceptor): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -23824,20 +30335,18 @@ def test_delete_backup_plan_association_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_delete_backup_plan_association" + transports.BackupDRRestInterceptor, "post_get_data_source_reference" ) as post, mock.patch.object( transports.BackupDRRestInterceptor, - "post_delete_backup_plan_association_with_metadata", + "post_get_data_source_reference_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_delete_backup_plan_association" + transports.BackupDRRestInterceptor, "pre_get_data_source_reference" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplanassociation.DeleteBackupPlanAssociationRequest.pb( - backupplanassociation.DeleteBackupPlanAssociationRequest() + pb_message = datasourcereference.GetDataSourceReferenceRequest.pb( + datasourcereference.GetDataSourceReferenceRequest() ) transcode.return_value = { "method": "post", @@ -23849,19 +30358,24 @@ def 
test_delete_backup_plan_association_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = datasourcereference.DataSourceReference.to_json( + datasourcereference.DataSourceReference() + ) req.return_value.content = return_value - request = backupplanassociation.DeleteBackupPlanAssociationRequest() + request = datasourcereference.GetDataSourceReferenceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = datasourcereference.DataSourceReference() + post_with_metadata.return_value = ( + datasourcereference.DataSourceReference(), + metadata, + ) - client.delete_backup_plan_association( + client.get_data_source_reference( request, metadata=[ ("key", "val"), @@ -23874,16 +30388,14 @@ def test_delete_backup_plan_association_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_trigger_backup_rest_bad_request( - request_type=backupplanassociation.TriggerBackupRequest, +def test_fetch_data_source_references_for_resource_type_rest_bad_request( + request_type=datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, ): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" - } + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23898,47 +30410,59 @@ def test_trigger_backup_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.trigger_backup(request) + client.fetch_data_source_references_for_resource_type(request) @pytest.mark.parametrize( "request_type", [ - backupplanassociation.TriggerBackupRequest, + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest, dict, ], ) -def test_trigger_backup_rest_call_success(request_type): +def test_fetch_data_source_references_for_resource_type_rest_call_success(request_type): client = BackupDRClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/backupPlanAssociations/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse( + next_page_token="next_page_token_value", + ) + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.pb( + return_value + ) + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.trigger_backup(request) + response = client.fetch_data_source_references_for_resource_type(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.FetchDataSourceReferencesForResourceTypePager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_trigger_backup_rest_interceptors(null_interceptor): +def test_fetch_data_source_references_for_resource_type_rest_interceptors( + null_interceptor, +): transport = transports.BackupDRRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.BackupDRRestInterceptor(), @@ -23950,19 +30474,22 @@ def test_trigger_backup_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.BackupDRRestInterceptor, "post_trigger_backup" + transports.BackupDRRestInterceptor, + "post_fetch_data_source_references_for_resource_type", ) as post, mock.patch.object( - transports.BackupDRRestInterceptor, "post_trigger_backup_with_metadata" 
+ transports.BackupDRRestInterceptor, + "post_fetch_data_source_references_for_resource_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackupDRRestInterceptor, "pre_trigger_backup" + transports.BackupDRRestInterceptor, + "pre_fetch_data_source_references_for_resource_type", ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = backupplanassociation.TriggerBackupRequest.pb( - backupplanassociation.TriggerBackupRequest() + pb_message = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest.pb( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() + ) ) transcode.return_value = { "method": "post", @@ -23974,19 +30501,26 @@ def test_trigger_backup_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = datasourcereference.FetchDataSourceReferencesForResourceTypeResponse.to_json( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) req.return_value.content = return_value - request = backupplanassociation.TriggerBackupRequest() + request = datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse() + ) + post_with_metadata.return_value = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeResponse(), + metadata, + ) - client.trigger_backup( + client.fetch_data_source_references_for_resource_type( request, metadata=[ ("key", "val"), @@ -25094,6 +31628,28 @@ def 
test_create_backup_plan_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_plan_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_plan), "__call__" + ) as call: + client.update_backup_plan(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.UpdateBackupPlanRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_backup_plan_empty_call_rest(): @@ -25158,6 +31714,50 @@ def test_delete_backup_plan_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_backup_plan_revision_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_backup_plan_revision), "__call__" + ) as call: + client.get_backup_plan_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.GetBackupPlanRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_backup_plan_revisions_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_backup_plan_revisions), "__call__" + ) as call: + client.list_backup_plan_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplan.ListBackupPlanRevisionsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_create_backup_plan_association_empty_call_rest(): @@ -25180,6 +31780,28 @@ def test_create_backup_plan_association_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_backup_plan_association_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_backup_plan_association), "__call__" + ) as call: + client.update_backup_plan_association(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = backupplanassociation.UpdateBackupPlanAssociationRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_get_backup_plan_association_empty_call_rest(): @@ -25224,6 +31846,31 @@ def test_list_backup_plan_associations_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_backup_plan_associations_for_resource_type_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_backup_plan_associations_for_resource_type), + "__call__", + ) as call: + client.fetch_backup_plan_associations_for_resource_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + backupplanassociation.FetchBackupPlanAssociationsForResourceTypeRequest() + ) + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_backup_plan_association_empty_call_rest(): @@ -25266,6 +31913,53 @@ def test_trigger_backup_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_source_reference_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_source_reference), "__call__" + ) as call: + client.get_data_source_reference(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datasourcereference.GetDataSourceReferenceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_fetch_data_source_references_for_resource_type_empty_call_rest(): + client = BackupDRClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.fetch_data_source_references_for_resource_type), + "__call__", + ) as call: + client.fetch_data_source_references_for_resource_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = ( + datasourcereference.FetchDataSourceReferencesForResourceTypeRequest() + ) + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_initialize_service_empty_call_rest(): @@ -25357,14 +32051,21 @@ def test_backup_dr_base_transport(): "delete_backup", "restore_backup", "create_backup_plan", + "update_backup_plan", "get_backup_plan", "list_backup_plans", "delete_backup_plan", + "get_backup_plan_revision", + "list_backup_plan_revisions", "create_backup_plan_association", + "update_backup_plan_association", "get_backup_plan_association", "list_backup_plan_associations", + "fetch_backup_plan_associations_for_resource_type", "delete_backup_plan_association", "trigger_backup", + "get_data_source_reference", + "fetch_data_source_references_for_resource_type", "initialize_service", "set_iam_policy", "get_iam_policy", @@ -25692,6 +32393,9 @@ def test_backup_dr_client_transport_session_collision(transport_name): session1 = client1.transport.create_backup_plan._session session2 = client2.transport.create_backup_plan._session assert session1 != session2 + session1 = client1.transport.update_backup_plan._session + session2 = client2.transport.update_backup_plan._session + assert session1 != session2 session1 = client1.transport.get_backup_plan._session session2 = client2.transport.get_backup_plan._session assert session1 != session2 @@ -25701,21 +32405,43 @@ def test_backup_dr_client_transport_session_collision(transport_name): session1 = client1.transport.delete_backup_plan._session session2 = client2.transport.delete_backup_plan._session assert session1 != session2 + session1 = client1.transport.get_backup_plan_revision._session + session2 = client2.transport.get_backup_plan_revision._session + assert session1 != session2 + session1 = client1.transport.list_backup_plan_revisions._session + session2 = client2.transport.list_backup_plan_revisions._session + assert session1 != session2 session1 = client1.transport.create_backup_plan_association._session session2 = client2.transport.create_backup_plan_association._session assert session1 != session2 + session1 = 
client1.transport.update_backup_plan_association._session + session2 = client2.transport.update_backup_plan_association._session + assert session1 != session2 session1 = client1.transport.get_backup_plan_association._session session2 = client2.transport.get_backup_plan_association._session assert session1 != session2 session1 = client1.transport.list_backup_plan_associations._session session2 = client2.transport.list_backup_plan_associations._session assert session1 != session2 + session1 = ( + client1.transport.fetch_backup_plan_associations_for_resource_type._session + ) + session2 = ( + client2.transport.fetch_backup_plan_associations_for_resource_type._session + ) + assert session1 != session2 session1 = client1.transport.delete_backup_plan_association._session session2 = client2.transport.delete_backup_plan_association._session assert session1 != session2 session1 = client1.transport.trigger_backup._session session2 = client2.transport.trigger_backup._session assert session1 != session2 + session1 = client1.transport.get_data_source_reference._session + session2 = client2.transport.get_data_source_reference._session + assert session1 != session2 + session1 = client1.transport.fetch_data_source_references_for_resource_type._session + session2 = client2.transport.fetch_data_source_references_for_resource_type._session + assert session1 != session2 session1 = client1.transport.initialize_service._session session2 = client2.transport.initialize_service._session assert session1 != session2 @@ -25963,10 +32689,41 @@ def test_parse_backup_plan_association_path(): assert expected == actual -def test_backup_vault_path(): +def test_backup_plan_revision_path(): project = "scallop" location = "abalone" - backupvault = "squid" + backup_plan = "squid" + revision = "clam" + expected = "projects/{project}/locations/{location}/backupPlans/{backup_plan}/revisions/{revision}".format( + project=project, + location=location, + backup_plan=backup_plan, + revision=revision, + ) + 
actual = BackupDRClient.backup_plan_revision_path( + project, location, backup_plan, revision + ) + assert expected == actual + + +def test_parse_backup_plan_revision_path(): + expected = { + "project": "whelk", + "location": "octopus", + "backup_plan": "oyster", + "revision": "nudibranch", + } + path = BackupDRClient.backup_plan_revision_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_backup_plan_revision_path(path) + assert expected == actual + + +def test_backup_vault_path(): + project = "cuttlefish" + location = "mussel" + backupvault = "winkle" expected = ( "projects/{project}/locations/{location}/backupVaults/{backupvault}".format( project=project, @@ -25980,9 +32737,9 @@ def test_backup_vault_path(): def test_parse_backup_vault_path(): expected = { - "project": "clam", - "location": "whelk", - "backupvault": "octopus", + "project": "nautilus", + "location": "scallop", + "backupvault": "abalone", } path = BackupDRClient.backup_vault_path(**expected) @@ -25992,10 +32749,10 @@ def test_parse_backup_vault_path(): def test_data_source_path(): - project = "oyster" - location = "nudibranch" - backupvault = "cuttlefish" - datasource = "mussel" + project = "squid" + location = "clam" + backupvault = "whelk" + datasource = "octopus" expected = "projects/{project}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}".format( project=project, location=location, @@ -26008,10 +32765,10 @@ def test_data_source_path(): def test_parse_data_source_path(): expected = { - "project": "winkle", - "location": "nautilus", - "backupvault": "scallop", - "datasource": "abalone", + "project": "oyster", + "location": "nudibranch", + "backupvault": "cuttlefish", + "datasource": "mussel", } path = BackupDRClient.data_source_path(**expected) @@ -26020,10 +32777,61 @@ def test_parse_data_source_path(): assert expected == actual +def test_data_source_reference_path(): + project = "winkle" + location = "nautilus" + 
data_source_reference = "scallop" + expected = "projects/{project}/locations/{location}/dataSourceReferences/{data_source_reference}".format( + project=project, + location=location, + data_source_reference=data_source_reference, + ) + actual = BackupDRClient.data_source_reference_path( + project, location, data_source_reference + ) + assert expected == actual + + +def test_parse_data_source_reference_path(): + expected = { + "project": "abalone", + "location": "squid", + "data_source_reference": "clam", + } + path = BackupDRClient.data_source_reference_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_data_source_reference_path(path) + assert expected == actual + + +def test_instance_path(): + project = "whelk" + instance = "octopus" + expected = "projects/{project}/instances/{instance}".format( + project=project, + instance=instance, + ) + actual = BackupDRClient.instance_path(project, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "oyster", + "instance": "nudibranch", + } + path = BackupDRClient.instance_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BackupDRClient.parse_instance_path(path) + assert expected == actual + + def test_management_server_path(): - project = "squid" - location = "clam" - managementserver = "whelk" + project = "cuttlefish" + location = "mussel" + managementserver = "winkle" expected = "projects/{project}/locations/{location}/managementServers/{managementserver}".format( project=project, location=location, @@ -26035,9 +32843,9 @@ def test_management_server_path(): def test_parse_management_server_path(): expected = { - "project": "octopus", - "location": "oyster", - "managementserver": "nudibranch", + "project": "nautilus", + "location": "scallop", + "managementserver": "abalone", } path = BackupDRClient.management_server_path(**expected) @@ -26046,6 +32854,32 @@ def test_parse_management_server_path(): assert expected == actual +def test_storage_pool_path(): + project = "squid" + zone = "clam" + storage_pool = "whelk" + expected = "projects/{project}/zones/{zone}/storagePools/{storage_pool}".format( + project=project, + zone=zone, + storage_pool=storage_pool, + ) + actual = BackupDRClient.storage_pool_path(project, zone, storage_pool) + assert expected == actual + + +def test_parse_storage_pool_path(): + expected = { + "project": "octopus", + "zone": "oyster", + "storage_pool": "nudibranch", + } + path = BackupDRClient.storage_pool_path(**expected) + + # Check that the path construction is reversible. + actual = BackupDRClient.parse_storage_pool_path(path) + assert expected == actual + + def test_common_billing_account_path(): billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format(