diff --git a/.librarian/state.yaml b/.librarian/state.yaml index ac9fef0923d5..c491eb84d365 100644 --- a/.librarian/state.yaml +++ b/.librarian/state.yaml @@ -2,7 +2,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-li libraries: - id: google-cloud-dlp version: 3.32.0 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/privacy/dlp/v2 service_config: dlp_v2.yaml @@ -21,7 +21,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-eventarc version: 1.16.0 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/eventarc/v1 service_config: eventarc_v1.yaml @@ -40,7 +40,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-video-live-stream version: 1.13.0 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/video/livestream/v1 service_config: livestream_v1.yaml @@ -59,7 +59,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-ads-marketingplatform-admin version: 0.1.6 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/marketingplatform/admin/v1alpha service_config: marketingplatformadmin_v1alpha.yaml @@ -78,7 +78,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-ai-generativelanguage version: 0.7.0 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/ai/generativelanguage/v1 service_config: generativelanguage_v1.yaml @@ -105,7 +105,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-analytics-admin version: 0.25.0 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + 
last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/analytics/admin/v1beta service_config: analyticsadmin_v1beta.yaml @@ -126,7 +126,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-analytics-data version: 0.18.19 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/analytics/data/v1alpha service_config: analyticsdata_v1alpha.yaml @@ -147,7 +147,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-ads-admanager version: 0.4.0 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/ads/admanager/v1 service_config: admanager_v1.yaml @@ -166,10 +166,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-apps-card version: 0.1.8 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/apps/card/v1 - service_config: '' + service_config: "" source_roots: - packages/google-apps-card preserve_regex: @@ -186,7 +186,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-apps-chat version: 0.2.9 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/chat/v1 service_config: chat_v1.yaml @@ -205,7 +205,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-apps-events-subscriptions version: 0.2.2 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/apps/events/subscriptions/v1 service_config: workspaceevents_v1.yaml @@ -226,7 +226,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-apps-meet version: 0.1.16 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: 
a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/apps/meet/v2beta service_config: meet_v2beta.yaml @@ -247,7 +247,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-area120-tables version: 0.11.17 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/area120/tables/v1alpha1 service_config: area120tables_v1alpha1.yaml @@ -266,7 +266,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-access-approval version: 1.16.2 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/accessapproval/v1 service_config: accessapproval_v1.yaml @@ -285,7 +285,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-advisorynotifications version: 0.3.16 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/advisorynotifications/v1 service_config: advisorynotifications_v1.yaml @@ -304,7 +304,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-alloydb version: 0.4.9 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/alloydb/v1beta service_config: alloydb_v1beta.yaml @@ -327,7 +327,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-alloydb-connectors version: 0.1.11 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/alloydb/connectors/v1 service_config: connectors_v1.yaml @@ -351,7 +351,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-api-gateway version: 1.12.2 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe 
apis: - path: google/cloud/apigateway/v1 service_config: apigateway_v1.yaml @@ -370,7 +370,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-api-keys version: 0.5.17 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/api/apikeys/v2 service_config: apikeys_v2.yaml @@ -389,9 +389,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-apigee-connect version: 1.12.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/apigeeconnect/v1 + service_config: apigeeconnect_v1.yaml source_roots: - packages/google-cloud-apigee-connect preserve_regex: @@ -407,9 +408,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-apigee-registry version: 0.6.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/apigeeregistry/v1 + service_config: apigeeregistry_v1.yaml source_roots: - packages/google-cloud-apigee-registry preserve_regex: @@ -425,9 +427,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-appengine-admin version: 1.14.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/appengine/v1 + service_config: appengine_v1.yaml source_roots: - packages/google-cloud-appengine-admin preserve_regex: @@ -443,9 +446,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-appengine-logging version: 1.6.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/appengine/logging/v1 + service_config: "" source_roots: - packages/google-cloud-appengine-logging preserve_regex: @@ -462,9 +466,10 @@ libraries: tag_format: 
'{id}-v{version}' - id: google-cloud-apphub version: 0.1.10 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/apphub/v1 + service_config: apphub_v1.yaml source_roots: - packages/google-cloud-apphub preserve_regex: @@ -480,10 +485,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-artifact-registry version: 1.16.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/devtools/artifactregistry/v1 + service_config: artifactregistry_v1.yaml - path: google/devtools/artifactregistry/v1beta2 + service_config: artifactregistry_v1beta2.yaml source_roots: - packages/google-cloud-artifact-registry preserve_regex: @@ -499,10 +506,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-automl version: 2.16.4 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/automl/v1beta1 + service_config: automl_v1beta1.yaml - path: google/cloud/automl/v1 + service_config: automl_v1.yaml source_roots: - packages/google-cloud-automl preserve_regex: @@ -523,9 +532,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-backupdr version: 0.2.5 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/backupdr/v1 + service_config: backupdr_v1.yaml source_roots: - packages/google-cloud-backupdr preserve_regex: @@ -541,9 +551,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bare-metal-solution version: 1.10.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/baremetalsolution/v2 + service_config: baremetalsolution_v2.yaml 
source_roots: - packages/google-cloud-bare-metal-solution preserve_regex: @@ -559,10 +570,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-batch version: 0.17.37 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/batch/v1alpha + service_config: batch_v1alpha.yaml - path: google/cloud/batch/v1 + service_config: batch_v1.yaml source_roots: - packages/google-cloud-batch preserve_regex: @@ -578,10 +591,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-support version: 0.1.19 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/support/v2beta + service_config: cloudsupport_v2beta.yaml - path: google/cloud/support/v2 + service_config: cloudsupport_v2.yaml source_roots: - packages/google-cloud-support preserve_regex: @@ -597,10 +612,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-talent version: 2.17.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/talent/v4beta1 + service_config: jobs_v4beta1.yaml - path: google/cloud/talent/v4 + service_config: jobs_v4.yaml source_roots: - packages/google-cloud-talent preserve_regex: @@ -616,11 +633,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-tpu version: 1.23.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/tpu/v2alpha1 + service_config: tpu_v2alpha1.yaml - path: google/cloud/tpu/v2 + service_config: tpu_v2.yaml - path: google/cloud/tpu/v1 + service_config: tpu_v1.yaml source_roots: - packages/google-cloud-tpu preserve_regex: @@ -636,9 +656,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-video-transcoder version: 
1.17.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/video/transcoder/v1 + service_config: transcoder_v1.yaml source_roots: - packages/google-cloud-video-transcoder preserve_regex: @@ -654,10 +675,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-visionai version: 0.1.10 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/visionai/v1alpha1 + service_config: visionai_v1alpha1.yaml - path: google/cloud/visionai/v1 + service_config: visionai_v1.yaml source_roots: - packages/google-cloud-visionai preserve_regex: @@ -673,9 +696,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-vm-migration version: 1.12.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/vmmigration/v1 + service_config: vmmigration_v1.yaml source_roots: - packages/google-cloud-vm-migration preserve_regex: @@ -691,9 +715,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-vmwareengine version: 1.8.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/vmwareengine/v1 + service_config: vmwareengine_v1.yaml source_roots: - packages/google-cloud-vmwareengine preserve_regex: @@ -709,9 +734,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-vpc-access version: 1.13.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/vpcaccess/v1 + service_config: vpcaccess_v1.yaml source_roots: - packages/google-cloud-vpc-access preserve_regex: @@ -727,11 +753,14 @@ libraries: tag_format: '{id}-v{version}' - id: 
google-cloud-websecurityscanner version: 1.17.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/websecurityscanner/v1alpha + service_config: websecurityscanner_v1alpha.yaml - path: google/cloud/websecurityscanner/v1beta + service_config: websecurityscanner_v1beta.yaml - path: google/cloud/websecurityscanner/v1 + service_config: websecurityscanner_v1.yaml source_roots: - packages/google-cloud-websecurityscanner preserve_regex: @@ -747,10 +776,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-workstations version: 0.5.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/workstations/v1beta + service_config: workstations_v1beta.yaml - path: google/cloud/workstations/v1 + service_config: workstations_v1.yaml source_roots: - packages/google-cloud-workstations preserve_regex: @@ -766,9 +797,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-apihub version: 0.2.7 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/apihub/v1 + service_config: apihub_v1.yaml source_roots: - packages/google-cloud-apihub preserve_regex: @@ -784,12 +816,16 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-asset version: 3.30.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/asset/v1p5beta1 + service_config: cloudasset_v1p5beta1.yaml - path: google/cloud/asset/v1 + service_config: cloudasset_v1.yaml - path: google/cloud/asset/v1p1beta1 + service_config: cloudasset_v1p1beta1.yaml - path: google/cloud/asset/v1p2beta1 + service_config: cloudasset_v1p2beta1.yaml source_roots: - packages/google-cloud-asset preserve_regex: @@ 
-805,10 +841,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-assured-workloads version: 1.15.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/assuredworkloads/v1 + service_config: assuredworkloads_v1.yaml - path: google/cloud/assuredworkloads/v1beta1 + service_config: assuredworkloads_v1beta1.yaml source_roots: - packages/google-cloud-assured-workloads preserve_regex: @@ -824,9 +862,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-appconnections version: 0.4.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/beyondcorp/appconnections/v1 + service_config: beyondcorp_v1.yaml source_roots: - packages/google-cloud-beyondcorp-appconnections preserve_regex: @@ -842,9 +881,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-appconnectors version: 0.4.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/beyondcorp/appconnectors/v1 + service_config: beyondcorp_v1.yaml source_roots: - packages/google-cloud-beyondcorp-appconnectors preserve_regex: @@ -860,9 +900,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-appgateways version: 0.4.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/beyondcorp/appgateways/v1 + service_config: beyondcorp_v1.yaml source_roots: - packages/google-cloud-beyondcorp-appgateways preserve_regex: @@ -878,9 +919,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-clientconnectorservices version: 0.4.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: 
a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/beyondcorp/clientconnectorservices/v1 + service_config: beyondcorp_v1.yaml source_roots: - packages/google-cloud-beyondcorp-clientconnectorservices preserve_regex: @@ -896,9 +938,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-beyondcorp-clientgateways version: 0.4.17 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/beyondcorp/clientgateways/v1 + service_config: beyondcorp_v1.yaml source_roots: - packages/google-cloud-beyondcorp-clientgateways preserve_regex: @@ -914,9 +957,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-analyticshub version: 0.4.20 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/analyticshub/v1 + service_config: analyticshub_v1.yaml source_roots: - packages/google-cloud-bigquery-analyticshub preserve_regex: @@ -932,18 +976,18 @@ libraries: tag_format: '{id}-v{version}' - id: googleapis-common-protos version: 1.70.0 - last_generated_commit: 31b413bc4feb03f6849c718048c2b9998561b5fa + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/api service_config: serviceconfig.yaml - path: google/cloud/location service_config: cloud.yaml - path: google/logging/type - service_config: '' + service_config: "" - path: google/rpc service_config: rpc_publish.yaml - path: google/rpc/context - service_config: '' + service_config: "" - path: google/type service_config: type.yaml source_roots: @@ -954,9 +998,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-storagebatchoperations version: 0.1.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/storagebatchoperations/v1 + 
service_config: storagebatchoperations_v1.yaml source_roots: - packages/google-cloud-storagebatchoperations preserve_regex: @@ -972,9 +1017,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-storageinsights version: 0.1.16 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/storageinsights/v1 + service_config: storageinsights_v1.yaml source_roots: - packages/google-cloud-storageinsights preserve_regex: @@ -990,11 +1036,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-tasks version: 2.19.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/tasks/v2beta2 + service_config: cloudtasks_v2beta2.yaml - path: google/cloud/tasks/v2beta3 + service_config: cloudtasks_v2beta3.yaml - path: google/cloud/tasks/v2 + service_config: cloudtasks_v2.yaml source_roots: - packages/google-cloud-tasks preserve_regex: @@ -1011,10 +1060,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-telcoautomation version: 0.2.11 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/telcoautomation/v1 + service_config: telcoautomation_v1.yaml - path: google/cloud/telcoautomation/v1alpha1 + service_config: telcoautomation_v1alpha1.yaml source_roots: - packages/google-cloud-telcoautomation preserve_regex: @@ -1031,10 +1082,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-texttospeech version: 2.31.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/texttospeech/v1 + service_config: texttospeech_v1.yaml - path: google/cloud/texttospeech/v1beta1 + service_config: texttospeech_v1beta1.yaml source_roots: - 
packages/google-cloud-texttospeech preserve_regex: @@ -1050,10 +1103,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-trace version: 1.16.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/devtools/cloudtrace/v2 + service_config: cloudtrace_v2.yaml - path: google/devtools/cloudtrace/v1 + service_config: cloudtrace_v1.yaml source_roots: - packages/google-cloud-trace preserve_regex: @@ -1069,13 +1124,18 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-videointelligence version: 2.16.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/videointelligence/v1p3beta1 + service_config: videointelligence_v1p3beta1.yaml - path: google/cloud/videointelligence/v1 + service_config: videointelligence_v1.yaml - path: google/cloud/videointelligence/v1p2beta1 + service_config: videointelligence_v1p2beta1.yaml - path: google/cloud/videointelligence/v1p1beta1 + service_config: videointelligence_v1p1beta1.yaml - path: google/cloud/videointelligence/v1beta2 + service_config: videointelligence_v1beta2.yaml source_roots: - packages/google-cloud-videointelligence preserve_regex: @@ -1091,13 +1151,18 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-vision version: 3.10.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/vision/v1p3beta1 + service_config: vision_v1p3beta1.yaml - path: google/cloud/vision/v1 + service_config: vision_v1.yaml - path: google/cloud/vision/v1p1beta1 + service_config: vision_v1p1beta1.yaml - path: google/cloud/vision/v1p2beta1 + service_config: vision_v1p2beta1.yaml - path: google/cloud/vision/v1p4beta1 + service_config: vision_v1p4beta1.yaml source_roots: - packages/google-cloud-vision preserve_regex: @@ 
-1116,10 +1181,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-webrisk version: 1.18.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/webrisk/v1beta1 + service_config: webrisk_v1beta1.yaml - path: google/cloud/webrisk/v1 + service_config: webrisk_v1.yaml source_roots: - packages/google-cloud-webrisk preserve_regex: @@ -1135,12 +1202,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-access-context-manager version: 0.2.2 - last_generated_commit: 329ace5e3712a2e37d6159d4dcd998d8c73f261e + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/identity/accesscontextmanager/v1 service_config: accesscontextmanager_v1.yaml - path: google/identity/accesscontextmanager/type - service_config: '' + service_config: "" source_roots: - packages/google-cloud-access-context-manager preserve_regex: [] @@ -1149,9 +1216,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-audit-log version: 0.3.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/audit + service_config: cloudaudit.yaml source_roots: - packages/google-cloud-audit-log preserve_regex: [] @@ -1160,10 +1228,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-biglake version: 0.4.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/biglake/v1alpha1 + service_config: biglake_v1alpha1.yaml - path: google/cloud/bigquery/biglake/v1 + service_config: biglake_v1.yaml source_roots: - packages/google-cloud-bigquery-biglake preserve_regex: @@ -1179,9 +1249,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-connection version: 1.18.3 - last_generated_commit: 
9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/connection/v1 + service_config: bigqueryconnection_v1.yaml source_roots: - packages/google-cloud-bigquery-connection preserve_regex: @@ -1197,9 +1268,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-data-exchange version: 0.5.20 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/dataexchange/v1beta1 + service_config: analyticshub_v1beta1.yaml source_roots: - packages/google-cloud-bigquery-data-exchange preserve_regex: @@ -1215,12 +1287,16 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-datapolicies version: 0.6.16 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/datapolicies/v2beta1 + service_config: bigquerydatapolicy_v2beta1.yaml - path: google/cloud/bigquery/datapolicies/v2 + service_config: bigquerydatapolicy_v2.yaml - path: google/cloud/bigquery/datapolicies/v1beta1 + service_config: bigquerydatapolicy_v1beta1.yaml - path: google/cloud/bigquery/datapolicies/v1 + service_config: bigquerydatapolicy_v1.yaml source_roots: - packages/google-cloud-bigquery-datapolicies preserve_regex: @@ -1236,9 +1312,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-datatransfer version: 3.19.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/datatransfer/v1 + service_config: bigquerydatatransfer_v1.yaml source_roots: - packages/google-cloud-bigquery-datatransfer preserve_regex: @@ -1254,9 +1331,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-logging version: 1.6.3 - last_generated_commit: 
9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/logging/v1 + service_config: "" source_roots: - packages/google-cloud-bigquery-logging preserve_regex: @@ -1273,10 +1351,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-migration version: 0.11.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/migration/v2alpha + service_config: bigquerymigration_v2alpha.yaml - path: google/cloud/bigquery/migration/v2 + service_config: bigquerymigration_v2.yaml source_roots: - packages/google-cloud-bigquery-migration preserve_regex: @@ -1292,9 +1372,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-bigquery-reservation version: 1.19.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/bigquery/reservation/v1 + service_config: bigqueryreservation_v1.yaml source_roots: - packages/google-cloud-bigquery-reservation preserve_regex: @@ -1310,9 +1391,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-billing version: 1.16.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/billing/v1 + service_config: cloudbilling_v1.yaml source_roots: - packages/google-cloud-billing preserve_regex: @@ -1328,10 +1410,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-billing-budgets version: 1.17.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/billing/budgets/v1 + service_config: billingbudgets.yaml - path: google/cloud/billing/budgets/v1beta1 + service_config: billingbudgets.yaml source_roots: - 
packages/google-cloud-billing-budgets preserve_regex: @@ -1347,7 +1431,7 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-translate version: 3.21.1 - last_generated_commit: eab82ec804dfd76372ca369910ccd0766ee02e5c + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/translate/v3beta1 service_config: translate_v3beta1.yaml @@ -1372,10 +1456,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-binary-authorization version: 1.13.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/binaryauthorization/v1 + service_config: binaryauthorization_v1.yaml - path: google/cloud/binaryauthorization/v1beta1 + service_config: binaryauthorization_v1beta1.yaml source_roots: - packages/google-cloud-binary-authorization preserve_regex: @@ -1391,10 +1477,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-build version: 3.32.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/devtools/cloudbuild/v1 + service_config: cloudbuild_v1.yaml - path: google/devtools/cloudbuild/v2 + service_config: cloudbuild_v2.yaml source_roots: - packages/google-cloud-build preserve_regex: @@ -1410,9 +1498,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-capacityplanner version: 0.1.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/capacityplanner/v1beta + service_config: capacityplanner_v1beta.yaml source_roots: - packages/google-cloud-capacityplanner preserve_regex: @@ -1428,9 +1517,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-certificate-manager version: 1.10.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: 
a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/certificatemanager/v1 + service_config: certificatemanager_v1.yaml source_roots: - packages/google-cloud-certificate-manager preserve_regex: @@ -1446,9 +1536,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-channel version: 1.23.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/channel/v1 + service_config: cloudchannel_v1.yaml source_roots: - packages/google-cloud-channel preserve_regex: @@ -1464,9 +1555,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-chronicle version: 0.1.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/chronicle/v1 + service_config: chronicle_v1.yaml source_roots: - packages/google-cloud-chronicle preserve_regex: @@ -1482,10 +1574,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-cloudcontrolspartner version: 0.2.7 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/cloudcontrolspartner/v1beta + service_config: cloudcontrolspartner_v1beta.yaml - path: google/cloud/cloudcontrolspartner/v1 + service_config: cloudcontrolspartner_v1.yaml source_roots: - packages/google-cloud-cloudcontrolspartner preserve_regex: @@ -1501,9 +1595,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-cloudsecuritycompliance version: 0.2.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/cloudsecuritycompliance/v1 + service_config: cloudsecuritycompliance_v1.yaml source_roots: - packages/google-cloud-cloudsecuritycompliance preserve_regex: @@ -1519,10 +1614,12 @@ libraries: tag_format: '{id}-v{version}' - id: 
google-cloud-commerce-consumer-procurement version: 0.2.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/commerce/consumer/procurement/v1 + service_config: cloudcommerceconsumerprocurement_v1.yaml - path: google/cloud/commerce/consumer/procurement/v1alpha1 + service_config: cloudcommerceconsumerprocurement_v1alpha1.yaml source_roots: - packages/google-cloud-commerce-consumer-procurement preserve_regex: @@ -1538,9 +1635,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-common version: 1.5.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/common + service_config: common.yaml source_roots: - packages/google-cloud-common preserve_regex: @@ -1557,9 +1655,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-compute version: 1.38.0 - last_generated_commit: d300b151a973ce0425ae4ad07b3de957ca31bec6 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/compute/v1 + service_config: compute_v1.yaml source_roots: - packages/google-cloud-compute preserve_regex: @@ -1575,9 +1674,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-compute-v1beta version: 0.1.8 - last_generated_commit: d300b151a973ce0425ae4ad07b3de957ca31bec6 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/compute/v1beta + service_config: compute_v1beta.yaml source_roots: - packages/google-cloud-compute-v1beta preserve_regex: @@ -1593,9 +1693,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-confidentialcomputing version: 0.5.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/confidentialcomputing/v1 + service_config: confidentialcomputing_v1.yaml 
source_roots: - packages/google-cloud-confidentialcomputing preserve_regex: @@ -1611,9 +1712,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-config version: 0.1.21 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/config/v1 + service_config: config_v1.yaml source_roots: - packages/google-cloud-config preserve_regex: @@ -1629,11 +1731,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-configdelivery version: 0.1.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/configdelivery/v1beta + service_config: configdelivery_v1beta.yaml - path: google/cloud/configdelivery/v1alpha + service_config: configdelivery_v1alpha.yaml - path: google/cloud/configdelivery/v1 + service_config: configdelivery_v1.yaml source_roots: - packages/google-cloud-configdelivery preserve_regex: @@ -1649,9 +1754,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-contact-center-insights version: 1.23.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/contactcenterinsights/v1 + service_config: contactcenterinsights_v1.yaml source_roots: - packages/google-cloud-contact-center-insights preserve_regex: @@ -1667,10 +1773,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-container version: 2.59.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/container/v1 + service_config: container_v1.yaml - path: google/container/v1beta1 + service_config: container_v1beta1.yaml source_roots: - packages/google-cloud-container preserve_regex: @@ -1686,9 +1794,10 @@ libraries: tag_format: '{id}-v{version}' - id: 
google-cloud-containeranalysis version: 2.18.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/devtools/containeranalysis/v1 + service_config: containeranalysis_v1.yaml source_roots: - packages/google-cloud-containeranalysis preserve_regex: @@ -1705,9 +1814,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-contentwarehouse version: 0.7.16 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/contentwarehouse/v1 + service_config: contentwarehouse_v1.yaml source_roots: - packages/google-cloud-contentwarehouse preserve_regex: @@ -1723,9 +1833,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-data-fusion version: 1.13.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/datafusion/v1 + service_config: datafusion_v1.yaml source_roots: - packages/google-cloud-data-fusion preserve_regex: @@ -1741,12 +1852,16 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-workflows version: 1.18.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/workflows/executions/v1 + service_config: workflowexecutions_v1.yaml - path: google/cloud/workflows/executions/v1beta + service_config: workflowexecutions_v1beta.yaml - path: google/cloud/workflows/v1 + service_config: workflows_v1.yaml - path: google/cloud/workflows/v1beta + service_config: workflows_v1beta.yaml source_roots: - packages/google-cloud-workflows preserve_regex: @@ -1762,9 +1877,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-video-stitcher version: 0.7.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: 
a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/video/stitcher/v1 + service_config: videostitcher_v1.yaml source_roots: - packages/google-cloud-video-stitcher preserve_regex: @@ -1780,10 +1896,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-redis-cluster version: 0.1.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/redis/cluster/v1 + service_config: redis_v1.yaml - path: google/cloud/redis/cluster/v1beta1 + service_config: redis_v1beta1.yaml source_roots: - packages/google-cloud-redis-cluster preserve_regex: @@ -1799,9 +1917,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-resource-manager version: 1.14.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/resourcemanager/v3 + service_config: cloudresourcemanager_v3.yaml source_roots: - packages/google-cloud-resource-manager preserve_regex: @@ -1817,11 +1936,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-retail version: 2.6.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/retail/v2 + service_config: retail_v2.yaml - path: google/cloud/retail/v2alpha + service_config: retail_v2alpha.yaml - path: google/cloud/retail/v2beta + service_config: retail_v2beta.yaml source_roots: - packages/google-cloud-retail preserve_regex: @@ -1837,9 +1959,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-run version: 0.11.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/run/v2 + service_config: run_v2.yaml source_roots: - packages/google-cloud-run preserve_regex: @@ -1855,9 +1978,10 @@ libraries: tag_format: 
'{id}-v{version}' - id: google-cloud-saasplatform-saasservicemgmt version: 0.1.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/saasplatform/saasservicemgmt/v1beta1 + service_config: saasservicemgmt_v1beta1.yaml source_roots: - packages/google-cloud-saasplatform-saasservicemgmt preserve_regex: @@ -1873,10 +1997,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-scheduler version: 2.16.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/scheduler/v1 + service_config: cloudscheduler_v1.yaml - path: google/cloud/scheduler/v1beta1 + service_config: cloudscheduler_v1beta1.yaml source_roots: - packages/google-cloud-scheduler preserve_regex: @@ -1892,9 +2018,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-securesourcemanager version: 0.1.17 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/securesourcemanager/v1 + service_config: securesourcemanager_v1.yaml source_roots: - packages/google-cloud-securesourcemanager preserve_regex: @@ -1910,12 +2037,16 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-securitycenter version: 1.40.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/securitycenter/v2 + service_config: securitycenter_v2.yaml - path: google/cloud/securitycenter/v1p1beta1 + service_config: securitycenter_v1p1beta1.yaml - path: google/cloud/securitycenter/v1beta1 + service_config: securitycenter_v1beta1.yaml - path: google/cloud/securitycenter/v1 + service_config: securitycenter_v1.yaml source_roots: - packages/google-cloud-securitycenter preserve_regex: @@ -1931,9 +2062,10 @@ libraries: 
tag_format: '{id}-v{version}' - id: google-cloud-securitycentermanagement version: 0.1.22 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/securitycentermanagement/v1 + service_config: securitycentermanagement_v1.yaml source_roots: - packages/google-cloud-securitycentermanagement preserve_regex: @@ -1949,10 +2081,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-service-control version: 1.16.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/api/servicecontrol/v2 + service_config: servicecontrol.yaml - path: google/api/servicecontrol/v1 + service_config: servicecontrol.yaml source_roots: - packages/google-cloud-service-control preserve_regex: @@ -1968,10 +2102,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-service-directory version: 1.14.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/servicedirectory/v1 + service_config: servicedirectory_v1.yaml - path: google/cloud/servicedirectory/v1beta1 + service_config: servicedirectory_v1beta1.yaml source_roots: - packages/google-cloud-service-directory preserve_regex: @@ -1987,9 +2123,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-service-management version: 1.13.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/api/servicemanagement/v1 + service_config: servicemanagement_v1.yaml source_roots: - packages/google-cloud-service-management preserve_regex: @@ -2005,9 +2142,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-service-usage version: 1.13.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + 
last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/api/serviceusage/v1 + service_config: serviceusage_v1.yaml source_roots: - packages/google-cloud-service-usage preserve_regex: @@ -2023,9 +2161,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-servicehealth version: 0.1.12 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/servicehealth/v1 + service_config: servicehealth_v1.yaml source_roots: - packages/google-cloud-servicehealth preserve_regex: @@ -2041,9 +2180,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-shell version: 1.12.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/shell/v1 + service_config: cloudshell_v1.yaml source_roots: - packages/google-cloud-shell preserve_regex: @@ -2059,9 +2199,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-source-context version: 1.7.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/devtools/source/v1 + service_config: "" source_roots: - packages/google-cloud-source-context preserve_regex: @@ -2078,11 +2219,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-speech version: 2.33.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/speech/v1 + service_config: speech_v1.yaml - path: google/cloud/speech/v2 + service_config: speech_v2.yaml - path: google/cloud/speech/v1p1beta1 + service_config: speech_v1p1beta1.yaml source_roots: - packages/google-cloud-speech preserve_regex: @@ -2100,9 +2244,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-storage-control version: 1.7.0 - 
last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/storage/control/v2 + service_config: storage_v2.yaml source_roots: - packages/google-cloud-storage-control preserve_regex: @@ -2118,9 +2263,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-storage-transfer version: 1.17.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/storagetransfer/v1 + service_config: storagetransfer_v1.yaml source_roots: - packages/google-cloud-storage-transfer preserve_regex: @@ -2136,9 +2282,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-iam-logging version: 1.4.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/iam/v1/logging + service_config: "" source_roots: - packages/google-cloud-iam-logging preserve_regex: @@ -2155,9 +2302,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-iap version: 1.17.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/iap/v1 + service_config: iap_v1.yaml source_roots: - packages/google-cloud-iap preserve_regex: @@ -2173,9 +2321,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-ids version: 1.10.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/ids/v1 + service_config: ids_v1.yaml source_roots: - packages/google-cloud-ids preserve_regex: @@ -2191,9 +2340,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-kms version: 3.6.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - 
path: google/cloud/kms/v1 + service_config: cloudkms_v1.yaml source_roots: - packages/google-cloud-kms preserve_regex: @@ -2209,9 +2359,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-kms-inventory version: 0.2.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/kms/inventory/v1 + service_config: kmsinventory_v1.yaml source_roots: - packages/google-cloud-kms-inventory preserve_regex: @@ -2227,11 +2378,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-language version: 2.17.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/language/v1 + service_config: language_v1.yaml - path: google/cloud/language/v1beta2 + service_config: language_v1beta2.yaml - path: google/cloud/language/v2 + service_config: language_v2.yaml source_roots: - packages/google-cloud-language preserve_regex: @@ -2247,9 +2401,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-licensemanager version: 0.1.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/licensemanager/v1 + service_config: licensemanager_v1.yaml source_roots: - packages/google-cloud-licensemanager preserve_regex: @@ -2265,9 +2420,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-life-sciences version: 0.9.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/lifesciences/v2beta + service_config: lifesciences_v2beta.yaml source_roots: - packages/google-cloud-life-sciences preserve_regex: @@ -2283,9 +2439,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-lustre version: 0.1.2 - last_generated_commit: 
9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/lustre/v1 + service_config: lustre_v1.yaml source_roots: - packages/google-cloud-lustre preserve_regex: @@ -2301,9 +2458,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-maintenance-api version: 0.1.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/maintenance/api/v1beta + service_config: maintenance_v1beta.yaml source_roots: - packages/google-cloud-maintenance-api preserve_regex: @@ -2319,9 +2477,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-managed-identities version: 1.12.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/managedidentities/v1 + service_config: managedidentities_v1.yaml source_roots: - packages/google-cloud-managed-identities preserve_regex: @@ -2337,9 +2496,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-managedkafka version: 0.1.12 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/managedkafka/v1 + service_config: managedkafka_v1.yaml source_roots: - packages/google-cloud-managedkafka preserve_regex: @@ -2355,9 +2515,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-managedkafka-schemaregistry version: 0.1.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/managedkafka/schemaregistry/v1 + service_config: managedkafka_v1.yaml source_roots: - packages/google-cloud-managedkafka-schemaregistry preserve_regex: @@ -2373,9 +2534,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-media-translation 
version: 0.11.17 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/mediatranslation/v1beta1 + service_config: mediatranslation_v1beta1.yaml source_roots: - packages/google-cloud-media-translation preserve_regex: @@ -2391,10 +2553,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-memcache version: 1.12.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/memcache/v1 + service_config: memcache_v1.yaml - path: google/cloud/memcache/v1beta2 + service_config: memcache_v1beta2.yaml source_roots: - packages/google-cloud-memcache preserve_regex: @@ -2410,10 +2574,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-memorystore version: 0.1.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/memorystore/v1beta + service_config: memorystore_v1beta.yaml - path: google/cloud/memorystore/v1 + service_config: memorystore_v1.yaml source_roots: - packages/google-cloud-memorystore preserve_regex: @@ -2429,9 +2595,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-migrationcenter version: 0.1.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/migrationcenter/v1 + service_config: migrationcenter_v1.yaml source_roots: - packages/google-cloud-migrationcenter preserve_regex: @@ -2447,10 +2614,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-modelarmor version: 0.2.8 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/modelarmor/v1beta + service_config: modelarmor_v1beta.yaml - path: 
google/cloud/modelarmor/v1 + service_config: modelarmor_v1.yaml source_roots: - packages/google-cloud-modelarmor preserve_regex: @@ -2466,9 +2635,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-monitoring-metrics-scopes version: 1.9.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/monitoring/metricsscope/v1 + service_config: monitoring.yaml source_roots: - packages/google-cloud-monitoring-metrics-scopes preserve_regex: @@ -2484,9 +2654,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-netapp version: 0.3.24 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/netapp/v1 + service_config: netapp_v1.yaml source_roots: - packages/google-cloud-netapp preserve_regex: @@ -2502,10 +2673,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-connectivity version: 2.10.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/networkconnectivity/v1 + service_config: networkconnectivity_v1.yaml - path: google/cloud/networkconnectivity/v1alpha1 + service_config: networkconnectivity_v1alpha1.yaml source_roots: - packages/google-cloud-network-connectivity preserve_regex: @@ -2521,9 +2694,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-management version: 1.28.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/networkmanagement/v1 + service_config: networkmanagement_v1.yaml source_roots: - packages/google-cloud-network-management preserve_regex: @@ -2539,9 +2713,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-network-services version: 0.5.24 - last_generated_commit: 
9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/networkservices/v1 + service_config: networkservices_v1.yaml source_roots: - packages/google-cloud-network-services preserve_regex: @@ -2557,11 +2732,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-notebooks version: 1.13.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/notebooks/v1beta1 + service_config: notebooks_v1beta1.yaml - path: google/cloud/notebooks/v1 + service_config: notebooks_v1.yaml - path: google/cloud/notebooks/v2 + service_config: notebooks_v2.yaml source_roots: - packages/google-cloud-notebooks preserve_regex: @@ -2577,9 +2755,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-optimization version: 1.11.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/optimization/v1 + service_config: cloudoptimization_v1.yaml source_roots: - packages/google-cloud-optimization preserve_regex: @@ -2595,9 +2774,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-oracledatabase version: 0.1.10 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/oracledatabase/v1 + service_config: oracledatabase_v1.yaml source_roots: - packages/google-cloud-oracledatabase preserve_regex: @@ -2613,10 +2793,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-orchestration-airflow version: 1.17.5 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/orchestration/airflow/service/v1 + service_config: composer_v1.yaml - path: 
google/cloud/orchestration/airflow/service/v1beta1 + service_config: composer_v1beta1.yaml source_roots: - packages/google-cloud-orchestration-airflow preserve_regex: @@ -2632,10 +2814,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-os-config version: 1.21.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/osconfig/v1alpha + service_config: osconfig_v1alpha.yaml - path: google/cloud/osconfig/v1 + service_config: osconfig_v1.yaml source_roots: - packages/google-cloud-os-config preserve_regex: @@ -2651,10 +2835,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-parallelstore version: 0.2.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/parallelstore/v1 + service_config: parallelstore_v1.yaml - path: google/cloud/parallelstore/v1beta + service_config: parallelstore_v1beta.yaml source_roots: - packages/google-cloud-parallelstore preserve_regex: @@ -2670,9 +2856,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-parametermanager version: 0.1.5 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/parametermanager/v1 + service_config: parametermanager_v1.yaml source_roots: - packages/google-cloud-parametermanager preserve_regex: @@ -2688,9 +2875,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-phishing-protection version: 1.14.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/phishingprotection/v1beta1 + service_config: phishingprotection_v1beta1.yaml source_roots: - packages/google-cloud-phishing-protection preserve_regex: @@ -2706,9 +2894,10 @@ libraries: tag_format: 
'{id}-v{version}' - id: google-cloud-policy-troubleshooter version: 1.13.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/policytroubleshooter/v1 + service_config: policytroubleshooter_v1.yaml source_roots: - packages/google-cloud-policy-troubleshooter preserve_regex: @@ -2724,9 +2913,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-policysimulator version: 0.1.15 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/policysimulator/v1 + service_config: policysimulator_v1.yaml source_roots: - packages/google-cloud-policysimulator preserve_regex: @@ -2742,9 +2932,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-policytroubleshooter-iam version: 0.1.13 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/policytroubleshooter/iam/v3 + service_config: policytroubleshooter_v3.yaml source_roots: - packages/google-cloud-policytroubleshooter-iam preserve_regex: @@ -2760,10 +2951,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-private-ca version: 1.15.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/security/privateca/v1 + service_config: privateca_v1.yaml - path: google/cloud/security/privateca/v1beta1 + service_config: privateca_v1beta1.yaml source_roots: - packages/google-cloud-private-ca preserve_regex: @@ -2779,9 +2972,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-private-catalog version: 0.9.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: 
google/cloud/privatecatalog/v1beta1 + service_config: cloudprivatecatalog_v1beta1.yaml source_roots: - packages/google-cloud-private-catalog preserve_regex: @@ -2797,9 +2991,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-privilegedaccessmanager version: 0.1.9 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/privilegedaccessmanager/v1 + service_config: privilegedaccessmanager_v1.yaml source_roots: - packages/google-cloud-privilegedaccessmanager preserve_regex: @@ -2815,10 +3010,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-quotas version: 0.1.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/api/cloudquotas/v1 + service_config: cloudquotas_v1.yaml - path: google/api/cloudquotas/v1beta + service_config: cloudquotas_v1beta.yaml source_roots: - packages/google-cloud-quotas preserve_regex: @@ -2834,9 +3031,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-rapidmigrationassessment version: 0.1.16 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/rapidmigrationassessment/v1 + service_config: rapidmigrationassessment_v1.yaml source_roots: - packages/google-cloud-rapidmigrationassessment preserve_regex: @@ -2852,9 +3050,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-recaptcha-enterprise version: 1.28.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/recaptchaenterprise/v1 + service_config: recaptchaenterprise_v1.yaml source_roots: - packages/google-cloud-recaptcha-enterprise preserve_regex: @@ -2870,9 +3069,10 @@ libraries: tag_format: '{id}-v{version}' - id: 
google-cloud-recommendations-ai version: 0.10.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/recommendationengine/v1beta1 + service_config: recommendationengine_v1beta1.yaml source_roots: - packages/google-cloud-recommendations-ai preserve_regex: @@ -2888,10 +3088,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-recommender version: 2.18.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/recommender/v1beta1 + service_config: recommender_v1beta1.yaml - path: google/cloud/recommender/v1 + service_config: recommender_v1.yaml source_roots: - packages/google-cloud-recommender preserve_regex: @@ -2907,10 +3109,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-redis version: 2.18.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/redis/v1 + service_config: redis_v1.yaml - path: google/cloud/redis/v1beta1 + service_config: redis_v1beta1.yaml source_roots: - packages/google-cloud-redis preserve_regex: @@ -2926,9 +3130,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-locationfinder version: 0.1.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/locationfinder/v1 + service_config: cloudlocationfinder_v1.yaml source_roots: - packages/google-cloud-locationfinder preserve_regex: @@ -2944,9 +3149,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-data-qna version: 0.10.17 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/dataqna/v1alpha + service_config: 
dataqna_v1alpha.yaml source_roots: - packages/google-cloud-data-qna preserve_regex: @@ -2962,10 +3168,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-datacatalog version: 3.27.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/datacatalog/v1 + service_config: datacatalog_v1.yaml - path: google/cloud/datacatalog/v1beta1 + service_config: datacatalog_v1beta1.yaml source_roots: - packages/google-cloud-datacatalog preserve_regex: @@ -2981,9 +3189,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-datacatalog-lineage version: 0.3.14 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/datacatalog/lineage/v1 + service_config: datalineage_v1.yaml source_roots: - packages/google-cloud-datacatalog-lineage preserve_regex: @@ -2999,9 +3208,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dataflow-client version: 0.9.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/dataflow/v1beta3 + service_config: dataflow_v1beta3.yaml source_roots: - packages/google-cloud-dataflow-client preserve_regex: @@ -3017,10 +3227,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dataform version: 0.6.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/dataform/v1beta1 + service_config: dataform_v1beta1.yaml - path: google/cloud/dataform/v1 + service_config: dataform_v1.yaml source_roots: - packages/google-cloud-dataform preserve_regex: @@ -3036,9 +3248,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-datalabeling version: 1.13.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + 
last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/datalabeling/v1beta1 + service_config: datalabeling_v1beta1.yaml source_roots: - packages/google-cloud-datalabeling preserve_regex: @@ -3054,9 +3267,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dataplex version: 2.12.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/dataplex/v1 + service_config: dataplex_v1.yaml source_roots: - packages/google-cloud-dataplex preserve_regex: @@ -3072,9 +3286,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dataproc version: 5.22.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/dataproc/v1 + service_config: dataproc_v1.yaml source_roots: - packages/google-cloud-dataproc preserve_regex: @@ -3090,11 +3305,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dataproc-metastore version: 1.19.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/metastore/v1alpha + service_config: metastore_v1alpha.yaml - path: google/cloud/metastore/v1beta + service_config: metastore_v1beta.yaml - path: google/cloud/metastore/v1 + service_config: metastore_v1.yaml source_roots: - packages/google-cloud-dataproc-metastore preserve_regex: @@ -3110,10 +3328,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-datastream version: 1.15.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/datastream/v1 + service_config: datastream_v1.yaml - path: google/cloud/datastream/v1alpha1 + service_config: datastream_v1alpha1.yaml source_roots: - packages/google-cloud-datastream 
preserve_regex: @@ -3129,9 +3349,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-deploy version: 2.7.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/deploy/v1 + service_config: clouddeploy_v1.yaml source_roots: - packages/google-cloud-deploy preserve_regex: @@ -3147,9 +3368,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-developerconnect version: 0.1.10 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/developerconnect/v1 + service_config: developerconnect_v1.yaml source_roots: - packages/google-cloud-developerconnect preserve_regex: @@ -3165,9 +3387,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-devicestreaming version: 0.1.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/devicestreaming/v1 + service_config: devicestreaming_v1.yaml source_roots: - packages/google-cloud-devicestreaming preserve_regex: @@ -3183,10 +3406,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dialogflow version: 2.41.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/dialogflow/v2beta1 + service_config: dialogflow_v2beta1.yaml - path: google/cloud/dialogflow/v2 + service_config: dialogflow_v2.yaml source_roots: - packages/google-cloud-dialogflow preserve_regex: @@ -3202,11 +3427,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-discoveryengine version: 0.13.12 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/discoveryengine/v1 + service_config: 
discoveryengine_v1.yaml - path: google/cloud/discoveryengine/v1beta + service_config: discoveryengine_v1beta.yaml - path: google/cloud/discoveryengine/v1alpha + service_config: discoveryengine_v1alpha.yaml source_roots: - packages/google-cloud-discoveryengine preserve_regex: @@ -3222,9 +3450,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-dms version: 1.12.4 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/clouddms/v1 + service_config: datamigration_v1.yaml source_roots: - packages/google-cloud-dms preserve_regex: @@ -3240,10 +3469,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-documentai version: 3.6.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/documentai/v1beta3 + service_config: documentai_v1beta3.yaml - path: google/cloud/documentai/v1 + service_config: documentai_v1.yaml source_roots: - packages/google-cloud-documentai preserve_regex: @@ -3259,10 +3490,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-domains version: 1.10.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/domains/v1beta1 + service_config: domains_v1beta1.yaml - path: google/cloud/domains/v1 + service_config: domains_v1.yaml source_roots: - packages/google-cloud-domains preserve_regex: @@ -3278,9 +3511,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-edgecontainer version: 0.5.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/edgecontainer/v1 + service_config: edgecontainer_v1.yaml source_roots: - packages/google-cloud-edgecontainer preserve_regex: @@ -3296,9 +3530,10 @@ libraries: 
tag_format: '{id}-v{version}' - id: google-cloud-edgenetwork version: 0.1.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/edgenetwork/v1 + service_config: edgenetwork_v1.yaml source_roots: - packages/google-cloud-edgenetwork preserve_regex: @@ -3314,9 +3549,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-enterpriseknowledgegraph version: 0.3.17 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/enterpriseknowledgegraph/v1 + service_config: enterpriseknowledgegraph_v1.yaml source_roots: - packages/google-cloud-enterpriseknowledgegraph preserve_regex: @@ -3332,9 +3568,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-essential-contacts version: 1.10.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/essentialcontacts/v1 + service_config: essentialcontacts_v1.yaml source_roots: - packages/google-cloud-essential-contacts preserve_regex: @@ -3350,9 +3587,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-eventarc-publishing version: 0.6.19 - last_generated_commit: d300b151a973ce0425ae4ad07b3de957ca31bec6 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/eventarc/publishing/v1 + service_config: eventarcpublishing_v1.yaml source_roots: - packages/google-cloud-eventarc-publishing preserve_regex: @@ -3368,9 +3606,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-filestore version: 1.13.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/filestore/v1 + service_config: file_v1.yaml source_roots: - packages/google-cloud-filestore 
preserve_regex: @@ -3386,9 +3625,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-financialservices version: 0.1.3 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/financialservices/v1 + service_config: financialservices_v1.yaml source_roots: - packages/google-cloud-financialservices preserve_regex: @@ -3404,10 +3644,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-functions version: 1.20.4 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/functions/v2 + service_config: cloudfunctions_v2.yaml - path: google/cloud/functions/v1 + service_config: cloudfunctions_v1.yaml source_roots: - packages/google-cloud-functions preserve_regex: @@ -3423,9 +3665,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-gdchardwaremanagement version: 0.1.13 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/gdchardwaremanagement/v1alpha + service_config: gdchardwaremanagement_v1alpha.yaml source_roots: - packages/google-cloud-gdchardwaremanagement preserve_regex: @@ -3441,10 +3684,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-geminidataanalytics version: 0.4.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/geminidataanalytics/v1beta + service_config: geminidataanalytics_v1beta.yaml - path: google/cloud/geminidataanalytics/v1alpha + service_config: geminidataanalytics_v1alpha.yaml source_roots: - packages/google-cloud-geminidataanalytics preserve_regex: @@ -3460,9 +3705,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-gke-backup version: 0.5.19 - last_generated_commit: 
9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/gkebackup/v1 + service_config: gkebackup_v1.yaml source_roots: - packages/google-cloud-gke-backup preserve_regex: @@ -3478,10 +3724,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-gke-connect-gateway version: 0.10.4 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/gkeconnect/gateway/v1beta1 + service_config: connectgateway_v1beta1.yaml - path: google/cloud/gkeconnect/gateway/v1 + service_config: connectgateway_v1.yaml source_roots: - packages/google-cloud-gke-connect-gateway preserve_regex: @@ -3497,10 +3745,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-cloud-security-publicca version: 0.3.18 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/cloud/security/publicca/v1 + service_config: publicca_v1.yaml - path: google/cloud/security/publicca/v1beta1 + service_config: publicca_v1beta1.yaml source_roots: - packages/google-cloud-security-publicca preserve_regex: @@ -3516,9 +3766,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-css version: 0.1.17 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/css/v1 + service_config: css_v1.yaml source_roots: - packages/google-shopping-css preserve_regex: @@ -3534,10 +3785,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-accounts version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/accounts/v1 + service_config: merchantapi_v1.yaml - path: 
google/shopping/merchant/accounts/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-accounts preserve_regex: @@ -3553,10 +3806,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-conversions version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/conversions/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/conversions/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-conversions preserve_regex: @@ -3572,10 +3827,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-datasources version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/datasources/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/datasources/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-datasources preserve_regex: @@ -3591,10 +3848,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-inventories version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/inventories/v1beta + service_config: merchantapi_v1beta.yaml - path: google/shopping/merchant/inventories/v1 + service_config: merchantapi_v1.yaml source_roots: - packages/google-shopping-merchant-inventories preserve_regex: @@ -3610,10 +3869,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-issueresolution version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: 
google/shopping/merchant/issueresolution/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/issueresolution/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-issueresolution preserve_regex: @@ -3629,10 +3890,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-lfp version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/lfp/v1beta + service_config: merchantapi_v1beta.yaml - path: google/shopping/merchant/lfp/v1 + service_config: merchantapi_v1.yaml source_roots: - packages/google-shopping-merchant-lfp preserve_regex: @@ -3648,10 +3911,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-notifications version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/notifications/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/notifications/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-notifications preserve_regex: @@ -3667,10 +3932,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-ordertracking version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/ordertracking/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/ordertracking/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-ordertracking preserve_regex: @@ -3686,10 +3953,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-products version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + 
last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/products/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/products/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-products preserve_regex: @@ -3705,9 +3974,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-productstudio version: 0.1.1 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/productstudio/v1alpha + service_config: merchantapi_v1alpha.yaml source_roots: - packages/google-shopping-merchant-productstudio preserve_regex: @@ -3723,10 +3993,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-promotions version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/promotions/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/promotions/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-promotions preserve_regex: @@ -3742,10 +4014,12 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-quota version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/quota/v1beta + service_config: merchantapi_v1beta.yaml - path: google/shopping/merchant/quota/v1 + service_config: merchantapi_v1.yaml source_roots: - packages/google-shopping-merchant-quota preserve_regex: @@ -3761,11 +4035,14 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-reports version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: 
a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/reports/v1beta + service_config: merchantapi_v1beta.yaml - path: google/shopping/merchant/reports/v1 + service_config: merchantapi_v1.yaml - path: google/shopping/merchant/reports/v1alpha + service_config: merchantapi_v1alpha.yaml source_roots: - packages/google-shopping-merchant-reports preserve_regex: @@ -3781,9 +4058,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-merchant-reviews version: 0.2.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/merchant/reviews/v1beta + service_config: merchantapi_v1beta.yaml source_roots: - packages/google-shopping-merchant-reviews preserve_regex: @@ -3799,9 +4077,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-shopping-type version: 1.0.0 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/shopping/type + service_config: "" source_roots: - packages/google-shopping-type preserve_regex: @@ -3818,9 +4097,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-geo-type version: 0.3.13 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/geo/type + service_config: type_geo.yaml source_roots: - packages/google-geo-type preserve_regex: @@ -3837,9 +4117,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-addressvalidation version: 0.3.20 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/addressvalidation/v1 + service_config: addressvalidation_v1.yaml source_roots: - packages/google-maps-addressvalidation preserve_regex: @@ -3855,9 +4136,10 @@ libraries: tag_format: '{id}-v{version}' - id: 
google-maps-areainsights version: 0.1.8 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/areainsights/v1 + service_config: areainsights_v1.yaml source_roots: - packages/google-maps-areainsights preserve_regex: @@ -3873,9 +4155,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-fleetengine version: 0.2.11 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/fleetengine/v1 + service_config: fleetengine_v1.yaml source_roots: - packages/google-maps-fleetengine preserve_regex: @@ -3891,9 +4174,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-fleetengine-delivery version: 0.2.13 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/fleetengine/delivery/v1 + service_config: fleetengine_v1.yaml source_roots: - packages/google-maps-fleetengine-delivery preserve_regex: @@ -3909,9 +4193,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-mapsplatformdatasets version: 0.4.8 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/mapsplatformdatasets/v1 + service_config: mapsplatformdatasets_v1.yaml source_roots: - packages/google-maps-mapsplatformdatasets preserve_regex: @@ -3927,9 +4212,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-places version: 0.2.2 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/places/v1 + service_config: places_v1.yaml source_roots: - packages/google-maps-places preserve_regex: @@ -3945,9 +4231,10 @@ libraries: tag_format: '{id}-v{version}' - id: 
google-maps-routeoptimization version: 0.1.11 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/routeoptimization/v1 + service_config: routeoptimization_v1.yaml source_roots: - packages/google-maps-routeoptimization preserve_regex: @@ -3964,9 +4251,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-routing version: 0.6.16 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/routing/v2 + service_config: routes_v2.yaml source_roots: - packages/google-maps-routing preserve_regex: @@ -3982,9 +4270,10 @@ libraries: tag_format: '{id}-v{version}' - id: google-maps-solar version: 0.1.9 - last_generated_commit: 9070e63a1f574261c153ef6e94afc55677200337 + last_generated_commit: a53e1e09d16d311ff6064d237abed3ddd7a5d1fe apis: - path: google/maps/solar/v1 + service_config: solar_v1.yaml source_roots: - packages/google-maps-solar preserve_regex: diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py index 6cbd57878a89..dddd8e9b95d1 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/async_client.py @@ -91,6 +91,8 @@ class AlloyDBAdminAsyncClient: parse_connection_info_path = staticmethod( AlloyDBAdminClient.parse_connection_info_path ) + crypto_key_path = staticmethod(AlloyDBAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(AlloyDBAdminClient.parse_crypto_key_path) crypto_key_version_path = staticmethod(AlloyDBAdminClient.crypto_key_version_path) parse_crypto_key_version_path = staticmethod( AlloyDBAdminClient.parse_crypto_key_version_path @@ -101,6 +103,10 
@@ class AlloyDBAdminAsyncClient: parse_instance_path = staticmethod(AlloyDBAdminClient.parse_instance_path) network_path = staticmethod(AlloyDBAdminClient.network_path) parse_network_path = staticmethod(AlloyDBAdminClient.parse_network_path) + service_attachment_path = staticmethod(AlloyDBAdminClient.service_attachment_path) + parse_service_attachment_path = staticmethod( + AlloyDBAdminClient.parse_service_attachment_path + ) supported_database_flag_path = staticmethod( AlloyDBAdminClient.supported_database_flag_path ) @@ -1243,7 +1249,7 @@ async def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request @@ -5142,8 +5148,7 @@ async def sample_list_databases(): Args: request (Optional[Union[google.cloud.alloydb_v1.types.ListDatabasesRequest, dict]]): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. parent (:class:`str`): Required. Parent value for ListDatabasesRequest. @@ -5161,8 +5166,8 @@ async def sample_list_databases(): Returns: google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListDatabasesAsyncPager: - Message for response to listing - Databases. + Message for ListDatabases response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py index db78adc65fcd..ed729794d70c 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/client.py @@ -272,6 +272,30 @@ def parse_connection_info_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def crypto_key_version_path( project: str, @@ -365,6 +389,28 @@ def parse_network_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def service_attachment_path( + project: str, + region: str, + service_attachment: str, + ) -> str: + """Returns a fully-qualified service_attachment string.""" + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + + @staticmethod + def parse_service_attachment_path(path: str) -> Dict[str, str]: + """Parses a service_attachment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + 
@staticmethod def supported_database_flag_path( project: str, @@ -1808,7 +1854,7 @@ def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request @@ -5627,8 +5673,7 @@ def sample_list_databases(): Args: request (Union[google.cloud.alloydb_v1.types.ListDatabasesRequest, dict]): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. parent (str): Required. Parent value for ListDatabasesRequest. @@ -5646,8 +5691,8 @@ def sample_list_databases(): Returns: google.cloud.alloydb_v1.services.alloy_db_admin.pagers.ListDatabasesPager: - Message for response to listing - Databases. + Message for ListDatabases response. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py index b7e3da27a774..ba5276f9572b 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_db_admin/transports/rest.py @@ -5985,8 +5985,7 @@ def __call__( Args: request (~.service.ListDatabasesRequest): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5997,9 +5996,7 @@ def __call__( Returns: ~.service.ListDatabasesResponse: - Message for response to listing - Databases. - + Message for ListDatabases response. 
""" http_options = ( diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/async_client.py index ce04112e1a4e..6ba3260e8da6 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/async_client.py @@ -83,6 +83,8 @@ class AlloyDBCSQLAdminAsyncClient: parse_backup_path = staticmethod(AlloyDBCSQLAdminClient.parse_backup_path) cluster_path = staticmethod(AlloyDBCSQLAdminClient.cluster_path) parse_cluster_path = staticmethod(AlloyDBCSQLAdminClient.parse_cluster_path) + crypto_key_path = staticmethod(AlloyDBCSQLAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(AlloyDBCSQLAdminClient.parse_crypto_key_path) crypto_key_version_path = staticmethod( AlloyDBCSQLAdminClient.crypto_key_version_path ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/client.py index e39d34d16463..1354a08970ee 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/services/alloy_dbcsql_admin/client.py @@ -246,6 +246,30 @@ def parse_cluster_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a 
crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def crypto_key_version_path( project: str, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py index d168894dc59d..497f8c73a573 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/resources.py @@ -119,12 +119,15 @@ class DatabaseVersion(proto.Enum): The database version is Postgres 15. POSTGRES_16 (4): The database version is Postgres 16. + POSTGRES_17 (5): + The database version is Postgres 17. """ DATABASE_VERSION_UNSPECIFIED = 0 POSTGRES_13 = 1 POSTGRES_14 = 2 POSTGRES_15 = 3 POSTGRES_16 = 4 + POSTGRES_17 = 5 class SubscriptionType(proto.Enum): @@ -939,14 +942,9 @@ class State(proto.Enum): READY (1): The cluster is active and running. STOPPED (2): - The cluster is stopped. All instances in the - cluster are stopped. Customers can start a - stopped cluster at any point and all their - instances will come back to life with same names - and IP resources. In this state, customer pays - for storage. - Associated backups could also be present in a - stopped cluster. + This is unused. Even when all instances in + the cluster are stopped, the cluster remains in + READY state. EMPTY (3): The cluster is empty and has no associated resources. All instances, associated storage and @@ -1427,6 +1425,9 @@ class Instance(proto.Message): can/cannot be activated (for example, a read pool instance should be stopped before stopping primary etc.). Please refer to the API documentation for more details. + connection_pool_config (google.cloud.alloydb_v1.types.Instance.ConnectionPoolConfig): + Optional. The configuration for Managed + Connection Pool (MCP). 
""" class State(proto.Enum): @@ -1994,6 +1995,35 @@ class AuthorizedNetwork(proto.Message): number=5, ) + class ConnectionPoolConfig(proto.Message): + r"""Configuration for Managed Connection Pool (MCP). + + Attributes: + enabled (bool): + Optional. Whether to enable Managed + Connection Pool (MCP). + flags (MutableMapping[str, str]): + Optional. Connection Pool flags, as a list of + "key": "value" pairs. + pooler_count (int): + Output only. The number of running poolers + per instance. + """ + + enabled: bool = proto.Field( + proto.BOOL, + number=12, + ) + flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + pooler_count: int = proto.Field( + proto.INT32, + number=14, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -2129,6 +2159,11 @@ class AuthorizedNetwork(proto.Message): number=35, enum=ActivationPolicy, ) + connection_pool_config: ConnectionPoolConfig = proto.Field( + proto.MESSAGE, + number=37, + message=ConnectionPoolConfig, + ) class ConnectionInfo(proto.Message): @@ -2729,19 +2764,31 @@ class UserType(proto.Enum): class Database(proto.Message): r"""Message describing Database object. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Identifier. Name of the resource in the form of ``projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}``. charset (str): - Optional. Charset for the database. This field can contain - any PostgreSQL supported charset name. Example values - include "UTF8", "SQL_ASCII", etc. + Optional. Immutable. Charset for the database. This field + can contain any PostgreSQL supported charset name. Example + values include "UTF8", "SQL_ASCII", etc. collation (str): - Optional. Collation for the database. - Name of the custom or native collation for - postgres. Example values include "C", "POSIX", - etc + Optional. Immutable. lc_collate for the database. String + sort order. 
Example values include "C", "POSIX", etc. + character_type (str): + Optional. Immutable. lc_ctype for the database. Character + classification (What is a letter? The upper-case + equivalent?). Example values include "C", "POSIX", etc. + database_template (str): + Input only. Immutable. Template of the + database to be used for creating a new database. + is_template_database (bool): + Optional. Whether the database is a template + database. + + This field is a member of `oneof`_ ``_is_template_database``. """ name: str = proto.Field( @@ -2756,6 +2803,19 @@ class Database(proto.Message): proto.STRING, number=3, ) + character_type: str = proto.Field( + proto.STRING, + number=4, + ) + database_template: str = proto.Field( + proto.STRING, + number=6, + ) + is_template_database: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py index 33858f024591..47abef01327c 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1/types/service.py @@ -2023,6 +2023,10 @@ class ExecuteSqlRequest(proto.Message): Required. SQL statement to execute on database. Any valid statement is permitted, including DDL, DML, DQL statements. + validate_only (bool): + Optional. If set, validates the sql statement + by performing syntax and semantic validation and + doesn't execute the query. """ password: str = proto.Field( @@ -2046,6 +2050,10 @@ class ExecuteSqlRequest(proto.Message): proto.STRING, number=4, ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) class ExecuteSqlResponse(proto.Message): @@ -2747,8 +2755,50 @@ class StageStatus(proto.Message): Upgrade stage. state (google.cloud.alloydb_v1.types.UpgradeClusterResponse.Status): State of this stage. 
+ schedule (google.cloud.alloydb_v1.types.UpgradeClusterStatus.StageStatus.StageSchedule): + Output only. Timing information for the stage + execution. """ + class StageSchedule(proto.Message): + r"""Timing information for the stage execution. + + Attributes: + estimated_start_time (google.protobuf.timestamp_pb2.Timestamp): + When the stage is expected to start. Set only + if the stage has not started yet. + actual_start_time (google.protobuf.timestamp_pb2.Timestamp): + Actual start time of the stage. Set only if + the stage has started. + estimated_end_time (google.protobuf.timestamp_pb2.Timestamp): + When the stage is expected to end. Set only + if the stage has not completed yet. + actual_end_time (google.protobuf.timestamp_pb2.Timestamp): + Actual end time of the stage. Set only if the + stage has completed. + """ + + estimated_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + actual_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + estimated_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + actual_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + read_pool_instances_upgrade: "UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus" = proto.Field( proto.MESSAGE, number=11, @@ -2765,6 +2815,11 @@ class StageStatus(proto.Message): number=2, enum="UpgradeClusterResponse.Status", ) + schedule: "UpgradeClusterStatus.StageStatus.StageSchedule" = proto.Field( + proto.MESSAGE, + number=3, + message="UpgradeClusterStatus.StageStatus.StageSchedule", + ) class ReadPoolInstancesUpgradeStageStatus(proto.Message): r"""Read pool instances upgrade specific status. 
@@ -3102,7 +3157,7 @@ class DeleteUserRequest(proto.Message): class ListDatabasesRequest(proto.Message): - r"""Message for requesting list of Databases. + r"""Message for ListDatabases request. Attributes: parent (str): @@ -3143,11 +3198,11 @@ class ListDatabasesRequest(proto.Message): class ListDatabasesResponse(proto.Message): - r"""Message for response to listing Databases. + r"""Message for ListDatabases response. Attributes: databases (MutableSequence[google.cloud.alloydb_v1.types.Database]): - The list of databases + The list of databases. next_page_token (str): A token identifying the next page of results the server should return. If this field is diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py index 0e19b602a91b..adf3b6ba1e0e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/__init__.py @@ -64,6 +64,7 @@ BatchCreateInstanceStatus, CreateBackupRequest, CreateClusterRequest, + CreateDatabaseRequest, CreateInstanceRequest, CreateInstanceRequests, CreateSecondaryClusterRequest, @@ -138,6 +139,7 @@ "ContinuousBackupSource", "CreateBackupRequest", "CreateClusterRequest", + "CreateDatabaseRequest", "CreateInstanceRequest", "CreateInstanceRequests", "CreateSecondaryClusterRequest", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json index 337a9a99b02d..57b3cfc7d1f5 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/gapic_metadata.json @@ -25,6 +25,11 @@ "create_cluster" ] }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, "CreateInstance": { "methods": [ "create_instance" @@ -215,6 +220,11 @@ "create_cluster" ] }, + 
"CreateDatabase": { + "methods": [ + "create_database" + ] + }, "CreateInstance": { "methods": [ "create_instance" @@ -405,6 +415,11 @@ "create_cluster" ] }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, "CreateInstance": { "methods": [ "create_instance" diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py index 4ee7c40fa04a..8fd1652816c5 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/async_client.py @@ -97,6 +97,8 @@ class AlloyDBAdminAsyncClient: parse_connection_info_path = staticmethod( AlloyDBAdminClient.parse_connection_info_path ) + crypto_key_path = staticmethod(AlloyDBAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(AlloyDBAdminClient.parse_crypto_key_path) crypto_key_version_path = staticmethod(AlloyDBAdminClient.crypto_key_version_path) parse_crypto_key_version_path = staticmethod( AlloyDBAdminClient.parse_crypto_key_version_path @@ -107,6 +109,10 @@ class AlloyDBAdminAsyncClient: parse_instance_path = staticmethod(AlloyDBAdminClient.parse_instance_path) network_path = staticmethod(AlloyDBAdminClient.network_path) parse_network_path = staticmethod(AlloyDBAdminClient.parse_network_path) + service_attachment_path = staticmethod(AlloyDBAdminClient.service_attachment_path) + parse_service_attachment_path = staticmethod( + AlloyDBAdminClient.parse_service_attachment_path + ) supported_database_flag_path = staticmethod( AlloyDBAdminClient.supported_database_flag_path ) @@ -1249,7 +1255,7 @@ async def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1alpha.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request @@ -5149,8 +5155,7 
@@ async def sample_list_databases(): Args: request (Optional[Union[google.cloud.alloydb_v1alpha.types.ListDatabasesRequest, dict]]): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. parent (:class:`str`): Required. Parent value for ListDatabasesRequest. @@ -5168,8 +5173,8 @@ async def sample_list_databases(): Returns: google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListDatabasesAsyncPager: - Message for response to listing - Databases. + Message for ListDatabases response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5235,6 +5240,132 @@ async def sample_list_databases(): # Done; return the response. return response + async def create_database( + self, + request: Optional[Union[service.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[resources.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Database: + r"""Creates a new Database in a given project, location, + and cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + async def sample_create_database(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1alpha.types.CreateDatabaseRequest, dict]]): + The request object. Message for CreateDatabase request. + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (:class:`google.cloud.alloydb_v1alpha.types.Database`): + Required. The resource being created. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.alloydb_v1alpha.types.Database: + Message describing Database object. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateDatabaseRequest): + request = service.CreateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py index 0495060a2b43..dec2ce2e6d5d 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/client.py @@ -278,6 +278,30 @@ def parse_connection_info_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def crypto_key_version_path( project: str, @@ -371,6 +395,28 @@ def parse_network_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def service_attachment_path( + project: str, + region: str, + service_attachment: str, + ) -> str: + """Returns a fully-qualified service_attachment string.""" + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + + @staticmethod + def parse_service_attachment_path(path: str) -> Dict[str, str]: + """Parses a service_attachment path into its component segments.""" + 
m = re.match( + r"^projects/(?P<project>.+?)/regions/(?P<region>.+?)/serviceAttachments/(?P<service_attachment>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def supported_database_flag_path( project: str, @@ -1814,7 +1860,7 @@ def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1alpha.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request @@ -5634,8 +5680,7 @@ def sample_list_databases(): Args: request (Union[google.cloud.alloydb_v1alpha.types.ListDatabasesRequest, dict]): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. parent (str): Required. Parent value for ListDatabasesRequest. @@ -5653,8 +5698,8 @@ def sample_list_databases(): Returns: google.cloud.alloydb_v1alpha.services.alloy_db_admin.pagers.ListDatabasesPager: - Message for response to listing - Databases. + Message for ListDatabases response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5717,6 +5762,129 @@ def sample_list_databases(): # Done; return the response. return response + def create_database( + self, + request: Optional[Union[service.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[resources.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Database: + r"""Creates a new Database in a given project, location, + and cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1alpha + + def sample_create_database(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1alpha.types.CreateDatabaseRequest, dict]): + The request object. Message for CreateDatabase request. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (google.cloud.alloydb_v1alpha.types.Database): + Required. The resource being created. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.alloydb_v1alpha.types.Database: + Message describing Database object. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateDatabaseRequest): + request = service.CreateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AlloyDBAdminClient": return self diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py index 12f1b160346a..bf0ec81c060e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/base.py @@ -421,6 +421,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_database: gapic_v1.method.wrap_method( + self.create_database, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -801,6 +815,15 @@ def list_databases( ]: raise NotImplementedError() + @property + def create_database( + self, + ) -> Callable[ + [service.CreateDatabaseRequest], + Union[resources.Database, Awaitable[resources.Database]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py index 56ea36ebb111..055b741be364 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc.py @@ -1342,6 +1342,33 @@ def list_databases( ) return self._stubs["list_databases"] + @property + def create_database( + self, + ) -> 
Callable[[service.CreateDatabaseRequest], resources.Database]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Database in a given project, location, + and cluster. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self._logged_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateDatabase", + request_serializer=service.CreateDatabaseRequest.serialize, + response_deserializer=resources.Database.deserialize, + ) + return self._stubs["create_database"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py index daaeba70ef5b..241661c22145 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/grpc_asyncio.py @@ -1380,6 +1380,33 @@ def list_databases( ) return self._stubs["list_databases"] + @property + def create_database( + self, + ) -> Callable[[service.CreateDatabaseRequest], Awaitable[resources.Database]]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Database in a given project, location, + and cluster. + + Returns: + Callable[[~.CreateDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self._logged_channel.unary_unary( + "/google.cloud.alloydb.v1alpha.AlloyDBAdmin/CreateDatabase", + request_serializer=service.CreateDatabaseRequest.serialize, + response_deserializer=resources.Database.deserialize, + ) + return self._stubs["create_database"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1667,6 +1694,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_database: self._wrap_method( + self.create_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py index 0411a7be7bff..3703f604cfd8 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest.py @@ -101,6 +101,14 @@ def post_create_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_create_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_database(self, response): + 
logging.log(f"Received response: {response}") + return response + def pre_create_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -515,6 +523,50 @@ def post_create_cluster_with_metadata( """ return response, metadata + def pre_create_database( + self, + request: service.CreateDatabaseRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_database(self, response: resources.Database) -> resources.Database: + """Post-rpc interceptor for create_database + + DEPRECATED. Please use the `post_create_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. This `post_create_database` interceptor runs + before the `post_create_database_with_metadata` interceptor. + """ + return response + + def post_create_database_with_metadata( + self, + response: resources.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_database_with_metadata` + interceptor in new development instead of the `post_create_database` interceptor. + When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the + `post_create_database` interceptor. 
The (possibly modified) response returned by + `post_create_database` will be passed to + `post_create_database_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: service.CreateInstanceRequest, @@ -2802,6 +2854,156 @@ def __call__( ) return resp + class _CreateDatabase( + _BaseAlloyDBAdminRestTransport._BaseCreateDatabase, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.CreateDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.CreateDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Database: + r"""Call the create database method over HTTP. + + Args: + request (~.service.CreateDatabaseRequest): + The request object. Message for CreateDatabase request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.Database: + Message describing Database object. 
+ """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_database(request, metadata) + transcoded_request = _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_transcoded_request( + http_options, request + ) + + body = _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.alloydb_v1alpha.AlloyDBAdminClient.CreateDatabase", + extra={ + "serviceName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "rpcName": "CreateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AlloyDBAdminRestTransport._CreateDatabase._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Database() + pb_resp = resources.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_database_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resources.Database.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.alloydb_v1alpha.AlloyDBAdminClient.create_database", + extra={ + "serviceName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "rpcName": "CreateDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _CreateInstance( _BaseAlloyDBAdminRestTransport._BaseCreateInstance, AlloyDBAdminRestStub ): @@ -5985,8 +6187,7 @@ def __call__( Args: request (~.service.ListDatabasesRequest): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5997,9 +6198,7 @@ def __call__( Returns: ~.service.ListDatabasesResponse: - Message for response to listing - Databases. - + Message for ListDatabases response. 
""" http_options = ( @@ -7943,6 +8142,14 @@ def create_cluster( # In C++ this would require a dynamic_cast return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def create_database( + self, + ) -> Callable[[service.CreateDatabaseRequest], resources.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore + @property def create_instance( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py index 35b150d68ced..bcac90f9df8e 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_db_admin/transports/rest_base.py @@ -267,6 +267,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "databaseId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1alpha/{parent=projects/*/locations/*/clusters/*}/databases", + "body": "database", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateDatabaseRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/async_client.py index ab49f9ad3b68..fb1f49ffb1bc 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/async_client.py @@ -83,6 +83,8 @@ class AlloyDBCSQLAdminAsyncClient: parse_backup_path = staticmethod(AlloyDBCSQLAdminClient.parse_backup_path) cluster_path = staticmethod(AlloyDBCSQLAdminClient.cluster_path) parse_cluster_path = staticmethod(AlloyDBCSQLAdminClient.parse_cluster_path) + crypto_key_path = staticmethod(AlloyDBCSQLAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(AlloyDBCSQLAdminClient.parse_crypto_key_path) crypto_key_version_path = staticmethod( AlloyDBCSQLAdminClient.crypto_key_version_path ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/client.py 
b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/client.py index 16dca9c2e7ac..cab4b18dc095 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/services/alloy_dbcsql_admin/client.py @@ -246,6 +246,30 @@ def parse_cluster_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def crypto_key_version_path( project: str, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py index 2cc30d73cb54..2b5e40149a56 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/__init__.py @@ -54,6 +54,7 @@ BatchCreateInstanceStatus, CreateBackupRequest, CreateClusterRequest, + CreateDatabaseRequest, CreateInstanceRequest, CreateInstanceRequests, CreateSecondaryClusterRequest, @@ -147,6 +148,7 @@ "BatchCreateInstanceStatus", "CreateBackupRequest", "CreateClusterRequest", + "CreateDatabaseRequest", "CreateInstanceRequest", "CreateInstanceRequests", "CreateSecondaryClusterRequest", diff --git 
a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py index 174446710581..afa122a10797 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/resources.py @@ -119,12 +119,15 @@ class DatabaseVersion(proto.Enum): The database version is Postgres 15. POSTGRES_16 (4): The database version is Postgres 16. + POSTGRES_17 (5): + The database version is Postgres 17. """ DATABASE_VERSION_UNSPECIFIED = 0 POSTGRES_13 = 1 POSTGRES_14 = 2 POSTGRES_15 = 3 POSTGRES_16 = 4 + POSTGRES_17 = 5 class SubscriptionType(proto.Enum): @@ -935,11 +938,11 @@ class Cluster(proto.Message): "123/costCenter": "marketing". service_account_email (str): Output only. AlloyDB per-cluster service - agent email. This service account is created + account. This service account is created per-cluster per-project, and is different from - that of the primary service agent which is - created per-project. The service account naming - format is subject to change. + the per-project service account. The per-cluster + service account naming format is subject to + change. """ class State(proto.Enum): @@ -951,14 +954,9 @@ class State(proto.Enum): READY (1): The cluster is active and running. STOPPED (2): - The cluster is stopped. All instances in the - cluster are stopped. Customers can start a - stopped cluster at any point and all their - instances will come back to life with same names - and IP resources. In this state, customer pays - for storage. - Associated backups could also be present in a - stopped cluster. + This is unused. Even when all instances in + the cluster are stopped, the cluster remains in + READY state. EMPTY (3): The cluster is empty and has no associated resources. 
All instances, associated storage and @@ -2094,30 +2092,27 @@ class ConnectionPoolConfig(proto.Message): enabled (bool): Optional. Whether to enable Managed Connection Pool (MCP). + flags (MutableMapping[str, str]): + Optional. Connection Pool flags, as a list of + "key": "value" pairs. + pooler_count (int): + Output only. The number of running poolers + per instance. """ - class PoolMode(proto.Enum): - r"""The pool mode. Defaults to ``POOL_MODE_TRANSACTION``. - - Values: - POOL_MODE_UNSPECIFIED (0): - The pool mode is not specified. Defaults to - ``POOL_MODE_TRANSACTION``. - POOL_MODE_SESSION (1): - Server is released back to pool after a - client disconnects. - POOL_MODE_TRANSACTION (2): - Server is released back to pool after a - transaction finishes. - """ - POOL_MODE_UNSPECIFIED = 0 - POOL_MODE_SESSION = 1 - POOL_MODE_TRANSACTION = 2 - enabled: bool = proto.Field( proto.BOOL, number=12, ) + flags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + pooler_count: int = proto.Field( + proto.INT32, + number=14, + ) name: str = proto.Field( proto.STRING, @@ -2900,19 +2895,34 @@ class UserType(proto.Enum): class Database(proto.Message): r"""Message describing Database object. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Identifier. Name of the resource in the form of ``projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}``. charset (str): - Optional. Charset for the database. This field can contain - any PostgreSQL supported charset name. Example values - include "UTF8", "SQL_ASCII", etc. + Optional. Immutable. Charset for the database. This field + can contain any PostgreSQL supported charset name. Example + values include "UTF8", "SQL_ASCII", etc. collation (str): - Optional. Collation for the database. - Name of the custom or native collation for - postgres. 
Example values include "C", "POSIX", - etc + Optional. Immutable. lc_collate for the database. String + sort order. Example values include "C", "POSIX", etc. + character_type (str): + Optional. Immutable. lc_ctype for the database. Character + classification (What is a letter? The upper-case + equivalent?). Example values include "C", "POSIX", etc. + is_template (bool): + Optional. Whether the database is a template database. + Deprecated in favor of is_template_database. + database_template (str): + Input only. Immutable. Template of the + database to be used for creating a new database. + is_template_database (bool): + Optional. Whether the database is a template + database. + + This field is a member of `oneof`_ ``_is_template_database``. """ name: str = proto.Field( @@ -2927,6 +2937,23 @@ class Database(proto.Message): proto.STRING, number=3, ) + character_type: str = proto.Field( + proto.STRING, + number=4, + ) + is_template: bool = proto.Field( + proto.BOOL, + number=5, + ) + database_template: str = proto.Field( + proto.STRING, + number=6, + ) + is_template_database: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py index 33be35df99e0..73bd04b2f542 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1alpha/types/service.py @@ -85,6 +85,7 @@ "DeleteUserRequest", "ListDatabasesRequest", "ListDatabasesResponse", + "CreateDatabaseRequest", }, ) @@ -2024,6 +2025,10 @@ class ExecuteSqlRequest(proto.Message): Required. SQL statement to execute on database. Any valid statement is permitted, including DDL, DML, DQL statements. + validate_only (bool): + Optional. 
If set, validates the sql statement + by performing syntax and semantic validation and + doesn't execute the query. """ password: str = proto.Field( @@ -2047,6 +2052,10 @@ class ExecuteSqlRequest(proto.Message): proto.STRING, number=4, ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) class ExecuteSqlResponse(proto.Message): @@ -2820,8 +2829,50 @@ class StageStatus(proto.Message): Upgrade stage. state (google.cloud.alloydb_v1alpha.types.UpgradeClusterResponse.Status): State of this stage. + schedule (google.cloud.alloydb_v1alpha.types.UpgradeClusterStatus.StageStatus.StageSchedule): + Output only. Timing information for the stage + execution. """ + class StageSchedule(proto.Message): + r"""Timing information for the stage execution. + + Attributes: + estimated_start_time (google.protobuf.timestamp_pb2.Timestamp): + When the stage is expected to start. Set only + if the stage has not started yet. + actual_start_time (google.protobuf.timestamp_pb2.Timestamp): + Actual start time of the stage. Set only if + the stage has started. + estimated_end_time (google.protobuf.timestamp_pb2.Timestamp): + When the stage is expected to end. Set only + if the stage has not completed yet. + actual_end_time (google.protobuf.timestamp_pb2.Timestamp): + Actual end time of the stage. Set only if the + stage has completed. 
+ """ + + estimated_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + actual_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + estimated_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + actual_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + read_pool_instances_upgrade: "UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus" = proto.Field( proto.MESSAGE, number=11, @@ -2838,6 +2889,11 @@ class StageStatus(proto.Message): number=2, enum="UpgradeClusterResponse.Status", ) + schedule: "UpgradeClusterStatus.StageStatus.StageSchedule" = proto.Field( + proto.MESSAGE, + number=3, + message="UpgradeClusterStatus.StageStatus.StageSchedule", + ) class ReadPoolInstancesUpgradeStageStatus(proto.Message): r"""Read pool instances upgrade specific status. @@ -3175,7 +3231,7 @@ class DeleteUserRequest(proto.Message): class ListDatabasesRequest(proto.Message): - r"""Message for requesting list of Databases. + r"""Message for ListDatabases request. Attributes: parent (str): @@ -3216,11 +3272,11 @@ class ListDatabasesRequest(proto.Message): class ListDatabasesResponse(proto.Message): - r"""Message for response to listing Databases. + r"""Message for ListDatabases response. Attributes: databases (MutableSequence[google.cloud.alloydb_v1alpha.types.Database]): - The list of databases + The list of databases. next_page_token (str): A token identifying the next page of results the server should return. If this field is @@ -3242,4 +3298,31 @@ def raw_page(self): ) +class CreateDatabaseRequest(proto.Message): + r"""Message for CreateDatabase request. + + Attributes: + parent (str): + Required. Value for parent. + database_id (str): + Required. ID of the requesting object. 
+ database (google.cloud.alloydb_v1alpha.types.Database): + Required. The resource being created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database_id: str = proto.Field( + proto.STRING, + number=2, + ) + database: resources.Database = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Database, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py index 65b194bd5676..b2185a5859e2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/__init__.py @@ -64,6 +64,7 @@ BatchCreateInstanceStatus, CreateBackupRequest, CreateClusterRequest, + CreateDatabaseRequest, CreateInstanceRequest, CreateInstanceRequests, CreateSecondaryClusterRequest, @@ -138,6 +139,7 @@ "ContinuousBackupSource", "CreateBackupRequest", "CreateClusterRequest", + "CreateDatabaseRequest", "CreateInstanceRequest", "CreateInstanceRequests", "CreateSecondaryClusterRequest", diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json index 2cdc98f9b8f2..4e57c2cb1a62 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/gapic_metadata.json @@ -25,6 +25,11 @@ "create_cluster" ] }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, "CreateInstance": { "methods": [ "create_instance" @@ -215,6 +220,11 @@ "create_cluster" ] }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, "CreateInstance": { "methods": [ "create_instance" @@ -405,6 +415,11 @@ "create_cluster" ] }, + "CreateDatabase": { + "methods": [ + "create_database" + ] + }, "CreateInstance": { "methods": [ "create_instance" diff 
--git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py index 2be9fcea5c1d..9a6fe0b23b01 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/async_client.py @@ -97,6 +97,8 @@ class AlloyDBAdminAsyncClient: parse_connection_info_path = staticmethod( AlloyDBAdminClient.parse_connection_info_path ) + crypto_key_path = staticmethod(AlloyDBAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(AlloyDBAdminClient.parse_crypto_key_path) crypto_key_version_path = staticmethod(AlloyDBAdminClient.crypto_key_version_path) parse_crypto_key_version_path = staticmethod( AlloyDBAdminClient.parse_crypto_key_version_path @@ -107,6 +109,10 @@ class AlloyDBAdminAsyncClient: parse_instance_path = staticmethod(AlloyDBAdminClient.parse_instance_path) network_path = staticmethod(AlloyDBAdminClient.network_path) parse_network_path = staticmethod(AlloyDBAdminClient.parse_network_path) + service_attachment_path = staticmethod(AlloyDBAdminClient.service_attachment_path) + parse_service_attachment_path = staticmethod( + AlloyDBAdminClient.parse_service_attachment_path + ) supported_database_flag_path = staticmethod( AlloyDBAdminClient.supported_database_flag_path ) @@ -1249,7 +1255,7 @@ async def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1beta.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request @@ -5149,8 +5155,7 @@ async def sample_list_databases(): Args: request (Optional[Union[google.cloud.alloydb_v1beta.types.ListDatabasesRequest, dict]]): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. 
parent (:class:`str`): Required. Parent value for ListDatabasesRequest. @@ -5168,8 +5173,8 @@ async def sample_list_databases(): Returns: google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListDatabasesAsyncPager: - Message for response to listing - Databases. + Message for ListDatabases response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5235,6 +5240,132 @@ async def sample_list_databases(): # Done; return the response. return response + async def create_database( + self, + request: Optional[Union[service.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[resources.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Database: + r"""Creates a new Database in a given project, location, + and cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + async def sample_create_database(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.alloydb_v1beta.types.CreateDatabaseRequest, dict]]): + The request object. 
Message for CreateDatabase request. + parent (:class:`str`): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (:class:`google.cloud.alloydb_v1beta.types.Database`): + Required. The resource being created. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (:class:`str`): + Required. ID of the requesting + object. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.alloydb_v1beta.types.Database: + Message describing Database object. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, service.CreateDatabaseRequest): + request = service.CreateDatabaseRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_database + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py index 5925bb0c1230..fb2b7201c9b3 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/client.py @@ -278,6 +278,30 @@ def parse_connection_info_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def crypto_key_version_path( project: str, @@ -371,6 +395,28 @@ def parse_network_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def service_attachment_path( + project: str, + region: str, + service_attachment: str, + ) -> str: + """Returns a fully-qualified service_attachment string.""" + return "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + + @staticmethod + def parse_service_attachment_path(path: str) -> Dict[str, str]: + """Parses a service_attachment path into its component segments.""" + m = 
re.match( + r"^projects/(?P.+?)/regions/(?P.+?)/serviceAttachments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def supported_database_flag_path( project: str, @@ -1814,7 +1860,7 @@ def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1beta.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request @@ -5634,8 +5680,7 @@ def sample_list_databases(): Args: request (Union[google.cloud.alloydb_v1beta.types.ListDatabasesRequest, dict]): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. parent (str): Required. Parent value for ListDatabasesRequest. @@ -5653,8 +5698,8 @@ def sample_list_databases(): Returns: google.cloud.alloydb_v1beta.services.alloy_db_admin.pagers.ListDatabasesPager: - Message for response to listing - Databases. + Message for ListDatabases response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -5717,6 +5762,129 @@ def sample_list_databases(): # Done; return the response. return response + def create_database( + self, + request: Optional[Union[service.CreateDatabaseRequest, dict]] = None, + *, + parent: Optional[str] = None, + database: Optional[resources.Database] = None, + database_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Database: + r"""Creates a new Database in a given project, location, + and cluster. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import alloydb_v1beta + + def sample_create_database(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.alloydb_v1beta.types.CreateDatabaseRequest, dict]): + The request object. Message for CreateDatabase request. + parent (str): + Required. Value for parent. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database (google.cloud.alloydb_v1beta.types.Database): + Required. The resource being created. + This corresponds to the ``database`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + database_id (str): + Required. ID of the requesting + object. + + This corresponds to the ``database_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.alloydb_v1beta.types.Database: + Message describing Database object. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, database, database_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, service.CreateDatabaseRequest): + request = service.CreateDatabaseRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if database is not None: + request.database = database + if database_id is not None: + request.database_id = database_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_database] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AlloyDBAdminClient": return self diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py index d02841acc473..7eb685d62ae8 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/base.py @@ -430,6 +430,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_database: gapic_v1.method.wrap_method( + self.create_database, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -810,6 +824,15 @@ def list_databases( ]: raise NotImplementedError() + @property + def create_database( + self, + ) -> Callable[ + [service.CreateDatabaseRequest], + Union[resources.Database, Awaitable[resources.Database]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py index 5f67e4d4d55c..054bd05869c4 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc.py @@ -1342,6 +1342,33 @@ def list_databases( ) return self._stubs["list_databases"] + @property + def create_database( + self, + ) -> 
Callable[[service.CreateDatabaseRequest], resources.Database]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Database in a given project, location, + and cluster. + + Returns: + Callable[[~.CreateDatabaseRequest], + ~.Database]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self._logged_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateDatabase", + request_serializer=service.CreateDatabaseRequest.serialize, + response_deserializer=resources.Database.deserialize, + ) + return self._stubs["create_database"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py index 56f05d02eda9..305dc1327cba 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/grpc_asyncio.py @@ -1380,6 +1380,33 @@ def list_databases( ) return self._stubs["list_databases"] + @property + def create_database( + self, + ) -> Callable[[service.CreateDatabaseRequest], Awaitable[resources.Database]]: + r"""Return a callable for the create database method over gRPC. + + Creates a new Database in a given project, location, + and cluster. + + Returns: + Callable[[~.CreateDatabaseRequest], + Awaitable[~.Database]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_database" not in self._stubs: + self._stubs["create_database"] = self._logged_channel.unary_unary( + "/google.cloud.alloydb.v1beta.AlloyDBAdmin/CreateDatabase", + request_serializer=service.CreateDatabaseRequest.serialize, + response_deserializer=resources.Database.deserialize, + ) + return self._stubs["create_database"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1676,6 +1703,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_database: self._wrap_method( + self.create_database, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py index c30aa5bebb56..38ceb1f2ec50 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest.py @@ -101,6 +101,14 @@ def post_create_cluster(self, response): logging.log(f"Received response: {response}") return response + def pre_create_database(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_database(self, response): + 
logging.log(f"Received response: {response}") + return response + def pre_create_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -515,6 +523,50 @@ def post_create_cluster_with_metadata( """ return response, metadata + def pre_create_database( + self, + request: service.CreateDatabaseRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[service.CreateDatabaseRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_database + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlloyDBAdmin server. + """ + return request, metadata + + def post_create_database(self, response: resources.Database) -> resources.Database: + """Post-rpc interceptor for create_database + + DEPRECATED. Please use the `post_create_database_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AlloyDBAdmin server but before + it is returned to user code. This `post_create_database` interceptor runs + before the `post_create_database_with_metadata` interceptor. + """ + return response + + def post_create_database_with_metadata( + self, + response: resources.Database, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[resources.Database, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_database + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlloyDBAdmin server but before it is returned to user code. + + We recommend only using this `post_create_database_with_metadata` + interceptor in new development instead of the `post_create_database` interceptor. + When both interceptors are used, this `post_create_database_with_metadata` interceptor runs after the + `post_create_database` interceptor. 
The (possibly modified) response returned by + `post_create_database` will be passed to + `post_create_database_with_metadata`. + """ + return response, metadata + def pre_create_instance( self, request: service.CreateInstanceRequest, @@ -2801,6 +2853,156 @@ def __call__( ) return resp + class _CreateDatabase( + _BaseAlloyDBAdminRestTransport._BaseCreateDatabase, AlloyDBAdminRestStub + ): + def __hash__(self): + return hash("AlloyDBAdminRestTransport.CreateDatabase") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: service.CreateDatabaseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.Database: + r"""Call the create database method over HTTP. + + Args: + request (~.service.CreateDatabaseRequest): + The request object. Message for CreateDatabase request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.Database: + Message describing Database object. 
+ """ + + http_options = ( + _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_database(request, metadata) + transcoded_request = _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_transcoded_request( + http_options, request + ) + + body = _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.alloydb_v1beta.AlloyDBAdminClient.CreateDatabase", + extra={ + "serviceName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "rpcName": "CreateDatabase", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AlloyDBAdminRestTransport._CreateDatabase._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.Database() + pb_resp = resources.Database.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_database(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_database_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resources.Database.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.alloydb_v1beta.AlloyDBAdminClient.create_database", + extra={ + "serviceName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "rpcName": "CreateDatabase", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _CreateInstance( _BaseAlloyDBAdminRestTransport._BaseCreateInstance, AlloyDBAdminRestStub ): @@ -5984,8 +6186,7 @@ def __call__( Args: request (~.service.ListDatabasesRequest): - The request object. Message for requesting list of - Databases. + The request object. Message for ListDatabases request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -5996,9 +6197,7 @@ def __call__( Returns: ~.service.ListDatabasesResponse: - Message for response to listing - Databases. - + Message for ListDatabases response. 
""" http_options = ( @@ -7942,6 +8141,14 @@ def create_cluster( # In C++ this would require a dynamic_cast return self._CreateCluster(self._session, self._host, self._interceptor) # type: ignore + @property + def create_database( + self, + ) -> Callable[[service.CreateDatabaseRequest], resources.Database]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDatabase(self._session, self._host, self._interceptor) # type: ignore + @property def create_instance( self, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py index 28cd9f7f9f0e..b1b21fb06944 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_db_admin/transports/rest_base.py @@ -267,6 +267,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateDatabase: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "databaseId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/clusters/*}/databases", + "body": "database", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = service.CreateDatabaseRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlloyDBAdminRestTransport._BaseCreateDatabase._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateInstance: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/async_client.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/async_client.py index e90a4d977cec..924dc8b77cce 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/async_client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/async_client.py @@ -83,6 +83,8 @@ class AlloyDBCSQLAdminAsyncClient: parse_backup_path = staticmethod(AlloyDBCSQLAdminClient.parse_backup_path) cluster_path = staticmethod(AlloyDBCSQLAdminClient.cluster_path) parse_cluster_path = staticmethod(AlloyDBCSQLAdminClient.parse_cluster_path) + crypto_key_path = staticmethod(AlloyDBCSQLAdminClient.crypto_key_path) + parse_crypto_key_path = staticmethod(AlloyDBCSQLAdminClient.parse_crypto_key_path) crypto_key_version_path = staticmethod( AlloyDBCSQLAdminClient.crypto_key_version_path ) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/client.py 
b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/client.py index 3564157670d0..a94e06d4e7f2 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/client.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/services/alloy_dbcsql_admin/client.py @@ -246,6 +246,30 @@ def parse_cluster_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, + ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def crypto_key_version_path( project: str, diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py index 2cc30d73cb54..2b5e40149a56 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/__init__.py @@ -54,6 +54,7 @@ BatchCreateInstanceStatus, CreateBackupRequest, CreateClusterRequest, + CreateDatabaseRequest, CreateInstanceRequest, CreateInstanceRequests, CreateSecondaryClusterRequest, @@ -147,6 +148,7 @@ "BatchCreateInstanceStatus", "CreateBackupRequest", "CreateClusterRequest", + "CreateDatabaseRequest", "CreateInstanceRequest", "CreateInstanceRequests", "CreateSecondaryClusterRequest", diff --git 
a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py index bd6e76a59956..57af831c16e4 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/resources.py @@ -119,12 +119,15 @@ class DatabaseVersion(proto.Enum): The database version is Postgres 15. POSTGRES_16 (4): The database version is Postgres 16. + POSTGRES_17 (5): + The database version is Postgres 17. """ DATABASE_VERSION_UNSPECIFIED = 0 POSTGRES_13 = 1 POSTGRES_14 = 2 POSTGRES_15 = 3 POSTGRES_16 = 4 + POSTGRES_17 = 5 class SubscriptionType(proto.Enum): @@ -933,11 +936,11 @@ class Cluster(proto.Message): "123/costCenter": "marketing". service_account_email (str): Output only. AlloyDB per-cluster service - agent email. This service account is created + account. This service account is created per-cluster per-project, and is different from - that of the primary service agent which is - created per-project. The service account naming - format is subject to change. + the per-project service account. The per-cluster + service account naming format is subject to + change. """ class State(proto.Enum): @@ -949,14 +952,9 @@ class State(proto.Enum): READY (1): The cluster is active and running. STOPPED (2): - The cluster is stopped. All instances in the - cluster are stopped. Customers can start a - stopped cluster at any point and all their - instances will come back to life with same names - and IP resources. In this state, customer pays - for storage. - Associated backups could also be present in a - stopped cluster. + This is unused. Even when all instances in + the cluster are stopped, the cluster remains in + READY state. EMPTY (3): The cluster is empty and has no associated resources. 
All instances, associated storage and @@ -2089,26 +2087,11 @@ class ConnectionPoolConfig(proto.Message): flags (MutableMapping[str, str]): Optional. Connection Pool flags, as a list of "key": "value" pairs. + pooler_count (int): + Output only. The number of running poolers + per instance. """ - class PoolMode(proto.Enum): - r"""The pool mode. Defaults to ``POOL_MODE_TRANSACTION``. - - Values: - POOL_MODE_UNSPECIFIED (0): - The pool mode is not specified. Defaults to - ``POOL_MODE_TRANSACTION``. - POOL_MODE_SESSION (1): - Server is released back to pool after a - client disconnects. - POOL_MODE_TRANSACTION (2): - Server is released back to pool after a - transaction finishes. - """ - POOL_MODE_UNSPECIFIED = 0 - POOL_MODE_SESSION = 1 - POOL_MODE_TRANSACTION = 2 - enabled: bool = proto.Field( proto.BOOL, number=12, @@ -2118,6 +2101,10 @@ class PoolMode(proto.Enum): proto.STRING, number=13, ) + pooler_count: int = proto.Field( + proto.INT32, + number=14, + ) name: str = proto.Field( proto.STRING, @@ -2890,19 +2877,34 @@ class UserType(proto.Enum): class Database(proto.Message): r"""Message describing Database object. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Identifier. Name of the resource in the form of ``projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}``. charset (str): - Optional. Charset for the database. This field can contain - any PostgreSQL supported charset name. Example values - include "UTF8", "SQL_ASCII", etc. + Optional. Immutable. Charset for the database. This field + can contain any PostgreSQL supported charset name. Example + values include "UTF8", "SQL_ASCII", etc. collation (str): - Optional. Collation for the database. - Name of the custom or native collation for - postgres. Example values include "C", "POSIX", - etc + Optional. Immutable. lc_collate for the database. String + sort order. 
Example values include "C", "POSIX", etc. + character_type (str): + Optional. Immutable. lc_ctype for the database. Character + classification (What is a letter? The upper-case + equivalent?). Example values include "C", "POSIX", etc. + is_template (bool): + Optional. Whether the database is a template database. + Deprecated in favor of is_template_database. + database_template (str): + Input only. Immutable. Template of the + database to be used for creating a new database. + is_template_database (bool): + Optional. Whether the database is a template + database. + + This field is a member of `oneof`_ ``_is_template_database``. """ name: str = proto.Field( @@ -2917,6 +2919,23 @@ class Database(proto.Message): proto.STRING, number=3, ) + character_type: str = proto.Field( + proto.STRING, + number=4, + ) + is_template: bool = proto.Field( + proto.BOOL, + number=5, + ) + database_template: str = proto.Field( + proto.STRING, + number=6, + ) + is_template_database: bool = proto.Field( + proto.BOOL, + number=7, + optional=True, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py index 36e66bd48724..753af81c3709 100644 --- a/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py +++ b/packages/google-cloud-alloydb/google/cloud/alloydb_v1beta/types/service.py @@ -85,6 +85,7 @@ "DeleteUserRequest", "ListDatabasesRequest", "ListDatabasesResponse", + "CreateDatabaseRequest", }, ) @@ -2024,6 +2025,10 @@ class ExecuteSqlRequest(proto.Message): Required. SQL statement to execute on database. Any valid statement is permitted, including DDL, DML, DQL statements. + validate_only (bool): + Optional. If set, validates the sql statement + by performing syntax and semantic validation and + doesn't execute the query. 
""" password: str = proto.Field( @@ -2047,6 +2052,10 @@ class ExecuteSqlRequest(proto.Message): proto.STRING, number=4, ) + validate_only: bool = proto.Field( + proto.BOOL, + number=6, + ) class ExecuteSqlResponse(proto.Message): @@ -2820,8 +2829,50 @@ class StageStatus(proto.Message): Upgrade stage. state (google.cloud.alloydb_v1beta.types.UpgradeClusterResponse.Status): State of this stage. + schedule (google.cloud.alloydb_v1beta.types.UpgradeClusterStatus.StageStatus.StageSchedule): + Output only. Timing information for the stage + execution. """ + class StageSchedule(proto.Message): + r"""Timing information for the stage execution. + + Attributes: + estimated_start_time (google.protobuf.timestamp_pb2.Timestamp): + When the stage is expected to start. Set only + if the stage has not started yet. + actual_start_time (google.protobuf.timestamp_pb2.Timestamp): + Actual start time of the stage. Set only if + the stage has started. + estimated_end_time (google.protobuf.timestamp_pb2.Timestamp): + When the stage is expected to end. Set only + if the stage has not completed yet. + actual_end_time (google.protobuf.timestamp_pb2.Timestamp): + Actual end time of the stage. Set only if the + stage has completed. 
+ """ + + estimated_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + actual_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + estimated_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + actual_end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + read_pool_instances_upgrade: "UpgradeClusterStatus.ReadPoolInstancesUpgradeStageStatus" = proto.Field( proto.MESSAGE, number=11, @@ -2838,6 +2889,11 @@ class StageStatus(proto.Message): number=2, enum="UpgradeClusterResponse.Status", ) + schedule: "UpgradeClusterStatus.StageStatus.StageSchedule" = proto.Field( + proto.MESSAGE, + number=3, + message="UpgradeClusterStatus.StageStatus.StageSchedule", + ) class ReadPoolInstancesUpgradeStageStatus(proto.Message): r"""Read pool instances upgrade specific status. @@ -3175,7 +3231,7 @@ class DeleteUserRequest(proto.Message): class ListDatabasesRequest(proto.Message): - r"""Message for requesting list of Databases. + r"""Message for ListDatabases request. Attributes: parent (str): @@ -3216,11 +3272,11 @@ class ListDatabasesRequest(proto.Message): class ListDatabasesResponse(proto.Message): - r"""Message for response to listing Databases. + r"""Message for ListDatabases response. Attributes: databases (MutableSequence[google.cloud.alloydb_v1beta.types.Database]): - The list of databases + The list of databases. next_page_token (str): A token identifying the next page of results the server should return. If this field is @@ -3242,4 +3298,31 @@ def raw_page(self): ) +class CreateDatabaseRequest(proto.Message): + r"""Message for CreateDatabase request. + + Attributes: + parent (str): + Required. Value for parent. + database_id (str): + Required. ID of the requesting object. 
+ database (google.cloud.alloydb_v1beta.types.Database): + Required. The resource being created. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + database_id: str = proto.Field( + proto.STRING, + number=2, + ) + database: resources.Database = proto.Field( + proto.MESSAGE, + number=3, + message=resources.Database, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_async.py index b37df6c78db3..bc4576c08edb 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_async.py +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_async.py @@ -41,7 +41,7 @@ async def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_sync.py index bcd34b8a4bf3..bce3a7cd9046 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_sync.py +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1_generated_alloy_db_admin_upgrade_cluster_sync.py @@ -41,7 +41,7 @@ def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request diff --git 
a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_database_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_database_async.py new file mode 100644 index 000000000000..cbc37bc92d21 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_database_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +async def sample_create_database(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateDatabase_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_database_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_database_sync.py new file mode 100644 index 000000000000..06abaad1ac38 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_create_database_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1alpha_generated_AlloyDBAdmin_CreateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1alpha + + +def sample_create_database(): + # Create a client + client = alloydb_v1alpha.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1alpha.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1alpha_generated_AlloyDBAdmin_CreateDatabase_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py index 8570afc3d6f2..96ceec40ce83 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_async.py @@ -41,7 +41,7 @@ async def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1alpha.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py 
b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py index 14cf4326094d..47b17fe0f05a 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1alpha_generated_alloy_db_admin_upgrade_cluster_sync.py @@ -41,7 +41,7 @@ def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1alpha.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_database_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_database_async.py new file mode 100644 index 000000000000..3f1d71de5488 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_database_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateDatabase_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +async def sample_create_database(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminAsyncClient() + + # Initialize request argument(s) + request = alloydb_v1beta.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = await client.create_database(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateDatabase_async] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_database_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_database_sync.py new file mode 100644 index 000000000000..532cd0aacd82 --- /dev/null +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_create_database_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDatabase +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-alloydb + + +# [START alloydb_v1beta_generated_AlloyDBAdmin_CreateDatabase_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import alloydb_v1beta + + +def sample_create_database(): + # Create a client + client = alloydb_v1beta.AlloyDBAdminClient() + + # Initialize request argument(s) + request = alloydb_v1beta.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Make the request + response = client.create_database(request=request) + + # Handle the response + print(response) + +# [END alloydb_v1beta_generated_AlloyDBAdmin_CreateDatabase_sync] diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py index a50a5c29ba63..d33787d047a1 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_async.py @@ -41,7 +41,7 @@ async def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1beta.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request diff --git a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py index ee7582d04c9e..ac8912f7a571 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py +++ b/packages/google-cloud-alloydb/samples/generated_samples/alloydb_v1beta_generated_alloy_db_admin_upgrade_cluster_sync.py @@ -41,7 +41,7 @@ 
def sample_upgrade_cluster(): # Initialize request argument(s) request = alloydb_v1beta.UpgradeClusterRequest( name="name_value", - version="POSTGRES_16", + version="POSTGRES_17", ) # Make the request diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json index 1cafd27a4879..aeb6349f4055 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1alpha.json @@ -518,6 +518,183 @@ ], "title": "alloydb_v1alpha_generated_alloy_db_admin_create_cluster_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminAsyncClient.create_database", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateDatabase", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.alloydb_v1alpha.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": 
"alloydb_v1alpha_generated_alloy_db_admin_create_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1alpha_generated_AlloyDBAdmin_CreateDatabase_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1alpha.AlloyDBAdminClient.create_database", + "method": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin.CreateDatabase", + "service": { + "fullName": "google.cloud.alloydb.v1alpha.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1alpha.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.alloydb_v1alpha.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.alloydb_v1alpha.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "alloydb_v1alpha_generated_alloy_db_admin_create_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"alloydb_v1alpha_generated_AlloyDBAdmin_CreateDatabase_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1alpha_generated_alloy_db_admin_create_database_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json index 3e961c3e9d48..9e0e47501ef4 100644 --- a/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json +++ b/packages/google-cloud-alloydb/samples/generated_samples/snippet_metadata_google.cloud.alloydb.v1beta.json @@ -518,6 +518,183 @@ ], "title": "alloydb_v1beta_generated_alloy_db_admin_create_cluster_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient", + "shortName": "AlloyDBAdminAsyncClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminAsyncClient.create_database", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateDatabase", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.alloydb_v1beta.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_database_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateDatabase_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_database_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient", + "shortName": "AlloyDBAdminClient" + }, + "fullName": "google.cloud.alloydb_v1beta.AlloyDBAdminClient.create_database", + "method": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin.CreateDatabase", + "service": { + "fullName": "google.cloud.alloydb.v1beta.AlloyDBAdmin", + "shortName": "AlloyDBAdmin" + }, + "shortName": "CreateDatabase" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.alloydb_v1beta.types.CreateDatabaseRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "database", + "type": "google.cloud.alloydb_v1beta.types.Database" + }, + { + "name": "database_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.cloud.alloydb_v1beta.types.Database", + "shortName": "create_database" + }, + "description": "Sample for CreateDatabase", + "file": "alloydb_v1beta_generated_alloy_db_admin_create_database_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "alloydb_v1beta_generated_AlloyDBAdmin_CreateDatabase_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "alloydb_v1beta_generated_alloy_db_admin_create_database_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1_keywords.py index 2b7ad0f47590..d47703c5e3e2 100644 --- a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1_keywords.py +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1_keywords.py @@ -50,7 +50,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), 'delete_user': ('name', 'request_id', 'validate_only', ), - 'execute_sql': ('instance', 'database', 'sql_statement', 'password', 'user', ), + 'execute_sql': ('instance', 'database', 'sql_statement', 'password', 'user', 'validate_only', ), 'export_cluster': ('gcs_destination', 'name', 'database', 'csv_export_options', 'sql_export_options', ), 'failover_instance': ('name', 'request_id', 'validate_only', ), 'generate_client_certificate': ('parent', 'request_id', 'cert_duration', 'public_key', 'use_metadata_exchange', ), diff --git 
a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py index 537931f9ebf9..591f7aef2826 100644 --- a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1alpha_keywords.py @@ -42,6 +42,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'batch_create_instances': ('parent', 'requests', 'request_id', ), 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', 'validate_only', ), 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_database': ('parent', 'database_id', 'database', ), 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), 'create_secondary_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), 'create_secondary_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), @@ -50,7 +51,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), 'delete_user': ('name', 'request_id', 'validate_only', ), - 'execute_sql': ('instance', 'database', 'user', 'sql_statement', 'password', ), + 'execute_sql': ('instance', 'database', 'user', 'sql_statement', 'password', 'validate_only', ), 'export_cluster': ('gcs_destination', 'name', 'database', 'csv_export_options', 'sql_export_options', ), 'failover_instance': ('name', 'request_id', 'validate_only', ), 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', 'public_key', 'use_metadata_exchange', ), diff --git a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py index 537931f9ebf9..591f7aef2826 100644 --- 
a/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py +++ b/packages/google-cloud-alloydb/scripts/fixup_alloydb_v1beta_keywords.py @@ -42,6 +42,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'batch_create_instances': ('parent', 'requests', 'request_id', ), 'create_backup': ('parent', 'backup_id', 'backup', 'request_id', 'validate_only', ), 'create_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), + 'create_database': ('parent', 'database_id', 'database', ), 'create_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), 'create_secondary_cluster': ('parent', 'cluster_id', 'cluster', 'request_id', 'validate_only', ), 'create_secondary_instance': ('parent', 'instance_id', 'instance', 'request_id', 'validate_only', ), @@ -50,7 +51,7 @@ class alloydbCallTransformer(cst.CSTTransformer): 'delete_cluster': ('name', 'request_id', 'etag', 'validate_only', 'force', ), 'delete_instance': ('name', 'request_id', 'etag', 'validate_only', ), 'delete_user': ('name', 'request_id', 'validate_only', ), - 'execute_sql': ('instance', 'database', 'user', 'sql_statement', 'password', ), + 'execute_sql': ('instance', 'database', 'user', 'sql_statement', 'password', 'validate_only', ), 'export_cluster': ('gcs_destination', 'name', 'database', 'csv_export_options', 'sql_export_options', ), 'failover_instance': ('name', 'request_id', 'validate_only', ), 'generate_client_certificate': ('parent', 'request_id', 'pem_csr', 'cert_duration', 'public_key', 'use_metadata_exchange', ), diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py index 79b17526adab..d5be9f6d6769 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_db_admin.py @@ -26706,6 +26706,7 @@ def 
test_create_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -26975,6 +26976,7 @@ def test_create_secondary_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -27259,6 +27261,11 @@ def test_batch_create_instances_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, + "connection_pool_config": { + "enabled": True, + "flags": {}, + "pooler_count": 1305, + }, }, "request_id": "request_id_value", "validate_only": True, @@ -27542,6 +27549,7 @@ def test_update_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -32288,12 +32296,41 @@ def test_parse_connection_info_path(): assert expected == actual -def test_crypto_key_version_path(): +def test_crypto_key_path(): project = "winkle" location = "nautilus" key_ring = "scallop" crypto_key = "abalone" - crypto_key_version = "squid" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = AlloyDBAdminClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "squid", + "location": "clam", + "key_ring": "whelk", + "crypto_key": "octopus", + } + path = AlloyDBAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "oyster" + location = "nudibranch" + key_ring = "cuttlefish" + crypto_key = "mussel" + crypto_key_version = "winkle" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -32309,11 +32346,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "clam", - "location": "whelk", - "key_ring": "octopus", - "crypto_key": "oyster", - "crypto_key_version": "nudibranch", + "project": "nautilus", + "location": "scallop", + "key_ring": "abalone", + "crypto_key": "squid", + "crypto_key_version": "clam", } path = AlloyDBAdminClient.crypto_key_version_path(**expected) @@ -32323,10 +32360,10 @@ def test_parse_crypto_key_version_path(): def test_database_path(): - project = "cuttlefish" - location = "mussel" - cluster = "winkle" - database = "nautilus" + 
project = "whelk" + location = "octopus" + cluster = "oyster" + database = "nudibranch" expected = "projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}".format( project=project, location=location, @@ -32339,10 +32376,10 @@ def test_database_path(): def test_parse_database_path(): expected = { - "project": "scallop", - "location": "abalone", - "cluster": "squid", - "database": "clam", + "project": "cuttlefish", + "location": "mussel", + "cluster": "winkle", + "database": "nautilus", } path = AlloyDBAdminClient.database_path(**expected) @@ -32352,10 +32389,10 @@ def test_parse_database_path(): def test_instance_path(): - project = "whelk" - location = "octopus" - cluster = "oyster" - instance = "nudibranch" + project = "scallop" + location = "abalone" + cluster = "squid" + instance = "clam" expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( project=project, location=location, @@ -32368,10 +32405,10 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "cluster": "winkle", - "instance": "nautilus", + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "instance": "nudibranch", } path = AlloyDBAdminClient.instance_path(**expected) @@ -32381,8 +32418,8 @@ def test_parse_instance_path(): def test_network_path(): - project = "scallop" - network = "abalone" + project = "cuttlefish" + network = "mussel" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -32393,8 +32430,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "squid", - "network": "clam", + "project": "winkle", + "network": "nautilus", } path = AlloyDBAdminClient.network_path(**expected) @@ -32403,10 +32440,38 @@ def test_parse_network_path(): assert expected == actual +def test_service_attachment_path(): + project = "scallop" + region = "abalone" + 
service_attachment = "squid" + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + actual = AlloyDBAdminClient.service_attachment_path( + project, region, service_attachment + ) + assert expected == actual + + +def test_parse_service_attachment_path(): + expected = { + "project": "clam", + "region": "whelk", + "service_attachment": "octopus", + } + path = AlloyDBAdminClient.service_attachment_path(**expected) + + # Check that the path construction is reversible. + actual = AlloyDBAdminClient.parse_service_attachment_path(path) + assert expected == actual + + def test_supported_database_flag_path(): - project = "whelk" - location = "octopus" - flag = "oyster" + project = "oyster" + location = "nudibranch" + flag = "cuttlefish" expected = "projects/{project}/locations/{location}/flags/{flag}".format( project=project, location=location, @@ -32418,9 +32483,9 @@ def test_supported_database_flag_path(): def test_parse_supported_database_flag_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "flag": "mussel", + "project": "mussel", + "location": "winkle", + "flag": "nautilus", } path = AlloyDBAdminClient.supported_database_flag_path(**expected) @@ -32430,10 +32495,10 @@ def test_parse_supported_database_flag_path(): def test_user_path(): - project = "winkle" - location = "nautilus" - cluster = "scallop" - user = "abalone" + project = "scallop" + location = "abalone" + cluster = "squid" + user = "clam" expected = "projects/{project}/locations/{location}/clusters/{cluster}/users/{user}".format( project=project, location=location, @@ -32446,10 +32511,10 @@ def test_user_path(): def test_parse_user_path(): expected = { - "project": "squid", - "location": "clam", - "cluster": "whelk", - "user": "octopus", + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "user": "nudibranch", } path = 
AlloyDBAdminClient.user_path(**expected) @@ -32459,7 +32524,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -32469,7 +32534,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "mussel", } path = AlloyDBAdminClient.common_billing_account_path(**expected) @@ -32479,7 +32544,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "cuttlefish" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -32489,7 +32554,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nautilus", } path = AlloyDBAdminClient.common_folder_path(**expected) @@ -32499,7 +32564,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -32509,7 +32574,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "abalone", } path = AlloyDBAdminClient.common_organization_path(**expected) @@ -32519,7 +32584,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -32529,7 +32594,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "clam", } path = AlloyDBAdminClient.common_project_path(**expected) @@ -32539,8 +32604,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "whelk" + 
location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -32551,8 +32616,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "oyster", + "location": "nudibranch", } path = AlloyDBAdminClient.common_location_path(**expected) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_dbcsql_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_dbcsql_admin.py index 4535190ea2c8..e772a46d4b9f 100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_dbcsql_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1/test_alloy_dbcsql_admin.py @@ -2955,12 +2955,43 @@ def test_parse_cluster_path(): assert expected == actual -def test_crypto_key_version_path(): +def test_crypto_key_path(): project = "squid" location = "clam" key_ring = "whelk" crypto_key = "octopus" - crypto_key_version = "oyster" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = AlloyDBCSQLAdminClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = AlloyDBCSQLAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBCSQLAdminClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "winkle" + location = "nautilus" + key_ring = "scallop" + crypto_key = "abalone" + crypto_key_version = "squid" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -2976,11 +3007,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "key_ring": "mussel", - "crypto_key": "winkle", - "crypto_key_version": "nautilus", + "project": "clam", + "location": "whelk", + "key_ring": "octopus", + "crypto_key": "oyster", + "crypto_key_version": "nudibranch", } path = AlloyDBCSQLAdminClient.crypto_key_version_path(**expected) @@ -2990,8 +3021,8 @@ def test_parse_crypto_key_version_path(): def test_network_path(): - project = "scallop" - network = "abalone" + project = "cuttlefish" + network = "mussel" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -3002,8 +3033,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "squid", - "network": "clam", + "project": "winkle", + "network": "nautilus", } path = AlloyDBCSQLAdminClient.network_path(**expected) @@ -3013,7 +3044,7 @@ def test_parse_network_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3023,7 +3054,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "abalone", } path = AlloyDBCSQLAdminClient.common_billing_account_path(**expected) @@ -3033,7 +3064,7 @@ def test_parse_common_billing_account_path(): def 
test_common_folder_path(): - folder = "oyster" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -3043,7 +3074,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "clam", } path = AlloyDBCSQLAdminClient.common_folder_path(**expected) @@ -3053,7 +3084,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -3063,7 +3094,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "octopus", } path = AlloyDBCSQLAdminClient.common_organization_path(**expected) @@ -3073,7 +3104,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -3083,7 +3114,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "nudibranch", } path = AlloyDBCSQLAdminClient.common_project_path(**expected) @@ -3093,8 +3124,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3105,8 +3136,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "winkle", + "location": "nautilus", } path = AlloyDBCSQLAdminClient.common_location_path(**expected) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py index d19585249983..48134743e11d 100644 
--- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_db_admin.py @@ -14954,6 +14954,371 @@ async def test_list_databases_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + service.CreateDatabaseRequest, + dict, + ], +) +def test_create_database(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, + ) + response = client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Database) + assert response.name == "name_value" + assert response.charset == "charset_value" + assert response.collation == "collation_value" + assert response.character_type == "character_type_value" + assert response.is_template is True + assert response.database_template == "database_template_value" + assert response.is_template_database is True + + +def test_create_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + +def test_create_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_database_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_database + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_database + ] = mock_rpc + + request = {} + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_database_async( + transport: str = "grpc_asyncio", request_type=service.CreateDatabaseRequest +): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, + ) + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Database) + assert response.name == "name_value" + assert response.charset == "charset_value" + assert response.collation == "collation_value" + assert response.character_type == "character_type_value" + assert response.is_template is True + assert response.database_template == "database_template_value" + assert response.is_template_database is True + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + + +def test_create_database_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = resources.Database() + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Database()) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_database_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = resources.Database(name="name_value") + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +def test_create_database_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + service.CreateDatabaseRequest(), + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_database_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_database( + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = resources.Database(name="name_value") + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_database_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_database( + service.CreateDatabaseRequest(), + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + def test_list_clusters_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -22227,11 +22592,272 @@ def test_list_databases_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_databases_rest_required_fields(request_type=service.ListDatabasesRequest): +def test_list_databases_rest_required_fields(request_type=service.ListDatabasesRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_databases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_databases_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_databases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_databases_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{parent=projects/*/locations/*/clusters/*}/databases" + % client.transport._host, + args[1], + ) + + +def test_list_databases_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListDatabasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Database) for i in results) + + pages = list(client.list_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert 
client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_database_rest_required_fields( + request_type=service.CreateDatabaseRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} request_init["parent"] = "" + request_init["database_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22239,32 +22865,32 @@ def test_list_databases_rest_required_fields(request_type=service.ListDatabasesR ) # verify fields with default values are dropped + assert "databaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) + ).create_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == request_init["database_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["databaseId"] = "database_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) + ).create_database._get_unset_required_fields(jsonified_request) # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("database_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == "database_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22273,7 +22899,7 @@ def test_list_databases_rest_required_fields(request_type=service.ListDatabasesR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListDatabasesResponse() + return_value = resources.Database() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22285,48 +22911,55 @@ def test_list_databases_rest_required_fields(request_type=service.ListDatabasesR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListDatabasesResponse.pb(return_value) + return_value = resources.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) + response = client.create_database(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + 
"databaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_databases_rest_unset_required_fields(): +def test_create_database_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_databases._get_unset_required_fields({}) + unset_fields = transport.create_database._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("databaseId",)) + & set( ( - "filter", - "pageSize", - "pageToken", + "parent", + "databaseId", + "database", ) ) - & set(("parent",)) ) -def test_list_databases_rest_flattened(): +def test_create_database_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22335,7 +22968,7 @@ def test_list_databases_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListDatabasesResponse() + return_value = resources.Database() # get arguments that satisfy an http rule for this method sample_request = { @@ -22345,6 +22978,8 @@ def test_list_databases_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", ) mock_args.update(sample_request) @@ -22352,13 +22987,13 @@ def test_list_databases_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListDatabasesResponse.pb(return_value) + return_value = resources.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(**mock_args) + client.create_database(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -22371,7 +23006,7 @@ def test_list_databases_rest_flattened(): ) -def test_list_databases_rest_flattened_error(transport: str = "rest"): +def test_create_database_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22380,75 +23015,14 @@ def test_list_databases_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_databases( - service.ListDatabasesRequest(), + client.create_database( + service.CreateDatabaseRequest(), parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", ) -def test_list_databases_rest_pager(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListDatabasesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" - } - - pager = client.list_databases(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Database) for i in results) - - pages = list(client.list_databases(request=sample_request).pages) - for page_, token in 
zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AlloyDBAdminGrpcTransport( @@ -23348,6 +23922,27 @@ def test_list_databases_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = resources.Database() + client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateDatabaseRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = AlloyDBAdminAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -24391,6 +24986,39 @@ async def test_list_databases_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_database_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, + ) + ) + await client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateDatabaseRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = AlloyDBAdminClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -26782,7 +27410,7 @@ def test_create_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True}, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, "gca_config": {"gca_entitlement": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -27058,7 +27686,7 @@ def test_create_secondary_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True}, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, "gca_config": {"gca_entitlement": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
@@ -27349,7 +27977,11 @@ def test_batch_create_instances_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True}, + "connection_pool_config": { + "enabled": True, + "flags": {}, + "pooler_count": 1305, + }, "gca_config": {"gca_entitlement": 1}, }, "request_id": "request_id_value", @@ -27639,7 +28271,7 @@ def test_update_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True}, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, "gca_config": {"gca_entitlement": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -30293,9 +30925,124 @@ def test_update_user_rest_interceptors(null_interceptor): transports.AlloyDBAdminRestInterceptor, "pre_update_user" ) as pre: pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resources.User.to_json(resources.User()) + req.return_value.content = return_value + + request = service.UpdateUserRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata + + client.update_user( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + 
pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_user(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteUserRequest, + dict, + ], +) +def test_delete_user_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_user(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_user_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_user" + ) as pre: + pre.assert_not_called() + pb_message = service.DeleteUserRequest.pb(service.DeleteUserRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -30306,19 +31053,15 @@ def test_update_user_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.User.to_json(resources.User()) - req.return_value.content = return_value - request = service.UpdateUserRequest() + request = service.DeleteUserRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.User() - post_with_metadata.return_value = resources.User(), metadata - client.update_user( + client.delete_user( request, metadata=[ ("key", "val"), @@ -30327,18 +31070,14 @@ def 
test_update_user_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): +def test_list_databases_rest_bad_request(request_type=service.ListDatabasesRequest): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -30353,47 +31092,51 @@ def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_user(request) + client.list_databases(request) @pytest.mark.parametrize( "request_type", [ - service.DeleteUserRequest, + service.ListDatabasesRequest, dict, ], ) -def test_delete_user_rest_call_success(request_type): +def test_list_databases_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = service.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_user(request) + response = client.list_databases(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_user_rest_interceptors(null_interceptor): +def test_list_databases_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -30407,10 +31150,16 @@ def test_delete_user_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_user" + transports.AlloyDBAdminRestInterceptor, "post_list_databases" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() - pb_message = service.DeleteUserRequest.pb(service.DeleteUserRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) transcode.return_value = { "method": "post", "uri": 
"my_uri", @@ -30421,15 +31170,21 @@ def test_delete_user_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListDatabasesResponse.to_json( + service.ListDatabasesResponse() + ) + req.return_value.content = return_value - request = service.DeleteUserRequest() + request = service.ListDatabasesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = service.ListDatabasesResponse() + post_with_metadata.return_value = service.ListDatabasesResponse(), metadata - client.delete_user( + client.list_databases( request, metadata=[ ("key", "val"), @@ -30438,9 +31193,11 @@ def test_delete_user_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_databases_rest_bad_request(request_type=service.ListDatabasesRequest): +def test_create_database_rest_bad_request(request_type=service.CreateDatabaseRequest): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -30460,30 +31217,112 @@ def test_list_databases_rest_bad_request(request_type=service.ListDatabasesReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(request) + client.create_database(request) @pytest.mark.parametrize( "request_type", [ - service.ListDatabasesRequest, + service.CreateDatabaseRequest, dict, ], ) -def test_list_databases_rest_call_success(request_type): +def test_create_database_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + 
request_init["database"] = { + "name": "name_value", + "charset": "charset_value", + "collation": "collation_value", + "character_type": "character_type_value", + "is_template": True, + "database_template": "database_template_value", + "is_template_database": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + 
result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListDatabasesResponse( - next_page_token="next_page_token_value", + return_value = resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, ) # Wrap the value into a proper Response obj @@ -30491,20 +31330,26 @@ def test_list_databases_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListDatabasesResponse.pb(return_value) + return_value = resources.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) + response = client.create_database(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.Database) + assert response.name == "name_value" + assert response.charset == "charset_value" + assert response.collation == "collation_value" + assert response.character_type == "character_type_value" + assert response.is_template is True + assert response.database_template == "database_template_value" + assert response.is_template_database is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): +def test_create_database_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -30518,16 +31363,16 @@ def test_list_databases_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_databases" + transports.AlloyDBAdminRestInterceptor, "post_create_database" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_databases_with_metadata" + transports.AlloyDBAdminRestInterceptor, "post_create_database_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_list_databases" + transports.AlloyDBAdminRestInterceptor, "pre_create_database" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) + pb_message = service.CreateDatabaseRequest.pb(service.CreateDatabaseRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -30538,21 +31383,19 @@ def test_list_databases_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} - return_value = service.ListDatabasesResponse.to_json( - service.ListDatabasesResponse() - ) + return_value = resources.Database.to_json(resources.Database()) req.return_value.content = return_value - request = service.ListDatabasesRequest() + request = service.CreateDatabaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListDatabasesResponse() - post_with_metadata.return_value = service.ListDatabasesResponse(), metadata + post.return_value = resources.Database() + post_with_metadata.return_value = resources.Database(), metadata - client.list_databases( + client.create_database( request, metadata=[ ("key", "val"), @@ -31696,6 +32539,26 @@ def test_list_databases_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + client.create_database(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateDatabaseRequest() + + assert args[0] == request_msg + + def test_alloy_db_admin_rest_lro_client(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -31783,6 +32646,7 @@ def test_alloy_db_admin_base_transport(): "update_user", "delete_user", "list_databases", + "create_database", "get_location", "list_locations", "get_operation", @@ -32160,6 +33024,9 @@ def test_alloy_db_admin_client_transport_session_collision(transport_name): session1 = client1.transport.list_databases._session session2 = client2.transport.list_databases._session assert session1 != session2 + session1 = client1.transport.create_database._session + session2 = client2.transport.create_database._session + assert session1 != session2 def test_alloy_db_admin_grpc_transport_channel(): @@ -32397,12 +33264,41 @@ def test_parse_connection_info_path(): assert expected == actual -def test_crypto_key_version_path(): +def test_crypto_key_path(): project = "winkle" location = "nautilus" key_ring = "scallop" crypto_key = "abalone" - crypto_key_version = "squid" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = AlloyDBAdminClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "squid", + "location": "clam", + "key_ring": "whelk", + "crypto_key": "octopus", + } + path = AlloyDBAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "oyster" + location = "nudibranch" + key_ring = "cuttlefish" + crypto_key = "mussel" + crypto_key_version = "winkle" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -32418,11 +33314,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "clam", - "location": "whelk", - "key_ring": "octopus", - "crypto_key": "oyster", - "crypto_key_version": "nudibranch", + "project": "nautilus", + "location": "scallop", + "key_ring": "abalone", + "crypto_key": "squid", + "crypto_key_version": "clam", } path = AlloyDBAdminClient.crypto_key_version_path(**expected) @@ -32432,10 +33328,10 @@ def test_parse_crypto_key_version_path(): def test_database_path(): - project = "cuttlefish" - location = "mussel" - cluster = "winkle" - database = "nautilus" + project = "whelk" + location = "octopus" + cluster = "oyster" + database = "nudibranch" expected = "projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}".format( project=project, location=location, @@ -32448,10 +33344,10 @@ def test_database_path(): def test_parse_database_path(): expected = { - "project": "scallop", - "location": "abalone", - "cluster": "squid", - "database": "clam", + "project": "cuttlefish", + "location": "mussel", + "cluster": "winkle", + "database": "nautilus", } path = AlloyDBAdminClient.database_path(**expected) @@ -32461,10 +33357,10 @@ def test_parse_database_path(): def test_instance_path(): - project = "whelk" - location = "octopus" - cluster = "oyster" - instance = "nudibranch" + project = "scallop" + location = "abalone" + cluster = "squid" + instance = "clam" expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( 
project=project, location=location, @@ -32477,10 +33373,10 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "cluster": "winkle", - "instance": "nautilus", + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "instance": "nudibranch", } path = AlloyDBAdminClient.instance_path(**expected) @@ -32490,8 +33386,8 @@ def test_parse_instance_path(): def test_network_path(): - project = "scallop" - network = "abalone" + project = "cuttlefish" + network = "mussel" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -32502,8 +33398,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "squid", - "network": "clam", + "project": "winkle", + "network": "nautilus", } path = AlloyDBAdminClient.network_path(**expected) @@ -32512,10 +33408,38 @@ def test_parse_network_path(): assert expected == actual +def test_service_attachment_path(): + project = "scallop" + region = "abalone" + service_attachment = "squid" + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + actual = AlloyDBAdminClient.service_attachment_path( + project, region, service_attachment + ) + assert expected == actual + + +def test_parse_service_attachment_path(): + expected = { + "project": "clam", + "region": "whelk", + "service_attachment": "octopus", + } + path = AlloyDBAdminClient.service_attachment_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_service_attachment_path(path) + assert expected == actual + + def test_supported_database_flag_path(): - project = "whelk" - location = "octopus" - flag = "oyster" + project = "oyster" + location = "nudibranch" + flag = "cuttlefish" expected = "projects/{project}/locations/{location}/flags/{flag}".format( project=project, location=location, @@ -32527,9 +33451,9 @@ def test_supported_database_flag_path(): def test_parse_supported_database_flag_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "flag": "mussel", + "project": "mussel", + "location": "winkle", + "flag": "nautilus", } path = AlloyDBAdminClient.supported_database_flag_path(**expected) @@ -32539,10 +33463,10 @@ def test_parse_supported_database_flag_path(): def test_user_path(): - project = "winkle" - location = "nautilus" - cluster = "scallop" - user = "abalone" + project = "scallop" + location = "abalone" + cluster = "squid" + user = "clam" expected = "projects/{project}/locations/{location}/clusters/{cluster}/users/{user}".format( project=project, location=location, @@ -32555,10 +33479,10 @@ def test_user_path(): def test_parse_user_path(): expected = { - "project": "squid", - "location": "clam", - "cluster": "whelk", - "user": "octopus", + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "user": "nudibranch", } path = AlloyDBAdminClient.user_path(**expected) @@ -32568,7 +33492,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -32578,7 +33502,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "mussel", } path = AlloyDBAdminClient.common_billing_account_path(**expected) @@ -32588,7 +33512,7 @@ def test_parse_common_billing_account_path(): 
def test_common_folder_path(): - folder = "cuttlefish" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -32598,7 +33522,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nautilus", } path = AlloyDBAdminClient.common_folder_path(**expected) @@ -32608,7 +33532,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -32618,7 +33542,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "abalone", } path = AlloyDBAdminClient.common_organization_path(**expected) @@ -32628,7 +33552,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -32638,7 +33562,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "clam", } path = AlloyDBAdminClient.common_project_path(**expected) @@ -32648,8 +33572,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -32660,8 +33584,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "oyster", + "location": "nudibranch", } path = AlloyDBAdminClient.common_location_path(**expected) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_dbcsql_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_dbcsql_admin.py index 979ddd5ee99d..4bb77ef23364 
100644 --- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_dbcsql_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1alpha/test_alloy_dbcsql_admin.py @@ -2956,12 +2956,43 @@ def test_parse_cluster_path(): assert expected == actual -def test_crypto_key_version_path(): +def test_crypto_key_path(): project = "squid" location = "clam" key_ring = "whelk" crypto_key = "octopus" - crypto_key_version = "oyster" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = AlloyDBCSQLAdminClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = AlloyDBCSQLAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBCSQLAdminClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "winkle" + location = "nautilus" + key_ring = "scallop" + crypto_key = "abalone" + crypto_key_version = "squid" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -2977,11 +3008,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "key_ring": "mussel", - "crypto_key": "winkle", - "crypto_key_version": "nautilus", + "project": "clam", + "location": "whelk", + "key_ring": "octopus", + "crypto_key": "oyster", + "crypto_key_version": "nudibranch", } path = AlloyDBCSQLAdminClient.crypto_key_version_path(**expected) @@ -2991,8 +3022,8 @@ def test_parse_crypto_key_version_path(): def test_network_path(): - project = "scallop" - network = "abalone" + project = "cuttlefish" + network = "mussel" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -3003,8 +3034,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "squid", - "network": "clam", + "project": "winkle", + "network": "nautilus", } path = AlloyDBCSQLAdminClient.network_path(**expected) @@ -3014,7 +3045,7 @@ def test_parse_network_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3024,7 +3055,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "abalone", } path = AlloyDBCSQLAdminClient.common_billing_account_path(**expected) @@ -3034,7 +3065,7 @@ def test_parse_common_billing_account_path(): def 
test_common_folder_path(): - folder = "oyster" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -3044,7 +3075,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "clam", } path = AlloyDBCSQLAdminClient.common_folder_path(**expected) @@ -3054,7 +3085,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -3064,7 +3095,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "octopus", } path = AlloyDBCSQLAdminClient.common_organization_path(**expected) @@ -3074,7 +3105,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -3084,7 +3115,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "nudibranch", } path = AlloyDBCSQLAdminClient.common_project_path(**expected) @@ -3094,8 +3125,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3106,8 +3137,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "winkle", + "location": "nautilus", } path = AlloyDBCSQLAdminClient.common_location_path(**expected) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py index 0241b0866dd5..b24df828a260 100644 --- 
a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_db_admin.py @@ -14942,6 +14942,371 @@ async def test_list_databases_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + service.CreateDatabaseRequest, + dict, + ], +) +def test_create_database(request_type, transport: str = "grpc"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, + ) + response = client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = service.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Database) + assert response.name == "name_value" + assert response.charset == "charset_value" + assert response.collation == "collation_value" + assert response.character_type == "character_type_value" + assert response.is_template is True + assert response.database_template == "database_template_value" + assert response.is_template_database is True + + +def test_create_database_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = service.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_database(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateDatabaseRequest( + parent="parent_value", + database_id="database_id_value", + ) + + +def test_create_database_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_database_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_database + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_database + ] = mock_rpc + + request = {} + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_database_async( + transport: str = "grpc_asyncio", request_type=service.CreateDatabaseRequest +): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, + ) + ) + response = await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = service.CreateDatabaseRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Database) + assert response.name == "name_value" + assert response.charset == "charset_value" + assert response.collation == "collation_value" + assert response.character_type == "character_type_value" + assert response.is_template is True + assert response.database_template == "database_template_value" + assert response.is_template_database is True + + +@pytest.mark.asyncio +async def test_create_database_async_from_dict(): + await test_create_database_async(request_type=dict) + + +def test_create_database_field_headers(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = resources.Database() + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_database_field_headers_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateDatabaseRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Database()) + await client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_database_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Database() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_database( + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = resources.Database(name="name_value") + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +def test_create_database_flattened_error(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_database( + service.CreateDatabaseRequest(), + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_database_flattened_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Database() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Database()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_database( + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].database + mock_val = resources.Database(name="name_value") + assert arg == mock_val + arg = args[0].database_id + mock_val = "database_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_database_flattened_error_async(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_database( + service.CreateDatabaseRequest(), + parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", + ) + + def test_list_clusters_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -22215,11 +22580,272 @@ def test_list_databases_rest_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -def test_list_databases_rest_required_fields(request_type=service.ListDatabasesRequest): +def test_list_databases_rest_required_fields(request_type=service.ListDatabasesRequest): + transport_class = transports.AlloyDBAdminRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).list_databases._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = service.ListDatabasesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_databases(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_databases_rest_unset_required_fields(): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_databases._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_databases_rest_flattened(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = service.ListDatabasesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_databases(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{parent=projects/*/locations/*/clusters/*}/databases" + % client.transport._host, + args[1], + ) + + +def test_list_databases_rest_flattened_error(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_databases( + service.ListDatabasesRequest(), + parent="parent_value", + ) + + +def test_list_databases_rest_pager(transport: str = "rest"): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + resources.Database(), + ], + next_page_token="abc", + ), + service.ListDatabasesResponse( + databases=[], + next_page_token="def", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + ], + next_page_token="ghi", + ), + service.ListDatabasesResponse( + databases=[ + resources.Database(), + resources.Database(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(service.ListDatabasesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/clusters/sample3" + } + + pager = client.list_databases(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Database) for i in results) + + pages = list(client.list_databases(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_database_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert 
client._transport.create_database in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_database] = mock_rpc + + request = {} + client.create_database(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_database(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_database_rest_required_fields( + request_type=service.CreateDatabaseRequest, +): transport_class = transports.AlloyDBAdminRestTransport request_init = {} request_init["parent"] = "" + request_init["database_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -22227,32 +22853,32 @@ def test_list_databases_rest_required_fields(request_type=service.ListDatabasesR ) # verify fields with default values are dropped + assert "databaseId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) + ).create_database._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == request_init["database_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["databaseId"] = "database_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_databases._get_unset_required_fields(jsonified_request) + ).create_database._get_unset_required_fields(jsonified_request) # Check that path parameters and body 
parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("database_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "databaseId" in jsonified_request + assert jsonified_request["databaseId"] == "database_id_value" client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -22261,7 +22887,7 @@ def test_list_databases_rest_required_fields(request_type=service.ListDatabasesR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = service.ListDatabasesResponse() + return_value = resources.Database() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -22273,48 +22899,55 @@ def test_list_databases_rest_required_fields(request_type=service.ListDatabasesR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListDatabasesResponse.pb(return_value) + return_value = resources.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) + response = client.create_database(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + 
"databaseId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_databases_rest_unset_required_fields(): +def test_create_database_rest_unset_required_fields(): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_databases._get_unset_required_fields({}) + unset_fields = transport.create_database._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("databaseId",)) + & set( ( - "filter", - "pageSize", - "pageToken", + "parent", + "databaseId", + "database", ) ) - & set(("parent",)) ) -def test_list_databases_rest_flattened(): +def test_create_database_rest_flattened(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -22323,7 +22956,7 @@ def test_list_databases_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListDatabasesResponse() + return_value = resources.Database() # get arguments that satisfy an http rule for this method sample_request = { @@ -22333,6 +22966,8 @@ def test_list_databases_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", ) mock_args.update(sample_request) @@ -22340,13 +22975,13 @@ def test_list_databases_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListDatabasesResponse.pb(return_value) + return_value = resources.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(**mock_args) + client.create_database(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -22359,7 +22994,7 @@ def test_list_databases_rest_flattened(): ) -def test_list_databases_rest_flattened_error(transport: str = "rest"): +def test_create_database_rest_flattened_error(transport: str = "rest"): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -22368,75 +23003,14 @@ def test_list_databases_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_databases( - service.ListDatabasesRequest(), + client.create_database( + service.CreateDatabaseRequest(), parent="parent_value", + database=resources.Database(name="name_value"), + database_id="database_id_value", ) -def test_list_databases_rest_pager(transport: str = "rest"): - client = AlloyDBAdminClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - resources.Database(), - ], - next_page_token="abc", - ), - service.ListDatabasesResponse( - databases=[], - next_page_token="def", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - ], - next_page_token="ghi", - ), - service.ListDatabasesResponse( - databases=[ - resources.Database(), - resources.Database(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(service.ListDatabasesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/clusters/sample3" - } - - pager = client.list_databases(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Database) for i in results) - - pages = list(client.list_databases(request=sample_request).pages) - for page_, token in 
zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AlloyDBAdminGrpcTransport( @@ -23336,6 +23910,27 @@ def test_list_databases_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_grpc(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + call.return_value = resources.Database() + client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateDatabaseRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = AlloyDBAdminAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -24376,6 +24971,39 @@ async def test_list_databases_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_database_empty_call_grpc_asyncio(): + client = AlloyDBAdminAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, + ) + ) + await client.create_database(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateDatabaseRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = AlloyDBAdminClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -26759,7 +27387,7 @@ def test_create_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True, "flags": {}}, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, "gca_config": {"gca_entitlement": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -27034,7 +27662,7 @@ def test_create_secondary_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True, "flags": {}}, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, "gca_config": {"gca_entitlement": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
@@ -27324,7 +27952,11 @@ def test_batch_create_instances_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True, "flags": {}}, + "connection_pool_config": { + "enabled": True, + "flags": {}, + "pooler_count": 1305, + }, "gca_config": {"gca_entitlement": 1}, }, "request_id": "request_id_value", @@ -27613,7 +28245,7 @@ def test_update_instance_rest_call_success(request_type): "outbound_public_ip_addresses_value2", ], "activation_policy": 1, - "connection_pool_config": {"enabled": True, "flags": {}}, + "connection_pool_config": {"enabled": True, "flags": {}, "pooler_count": 1305}, "gca_config": {"gca_entitlement": 1}, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -30263,9 +30895,124 @@ def test_update_user_rest_interceptors(null_interceptor): transports.AlloyDBAdminRestInterceptor, "pre_update_user" ) as pre: pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.UpdateUserRequest.pb(service.UpdateUserRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resources.User.to_json(resources.User()) + req.return_value.content = return_value + + request = service.UpdateUserRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.User() + post_with_metadata.return_value = resources.User(), metadata + + client.update_user( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + 
) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_user(request) + + +@pytest.mark.parametrize( + "request_type", + [ + service.DeleteUserRequest, + dict, + ], +) +def test_delete_user_rest_call_success(request_type): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_user(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_user_rest_interceptors(null_interceptor): + transport = transports.AlloyDBAdminRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlloyDBAdminRestInterceptor(), + ) + client = AlloyDBAdminClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_delete_user" + ) as pre: + pre.assert_not_called() + pb_message = service.DeleteUserRequest.pb(service.DeleteUserRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -30276,19 +31023,15 @@ def test_update_user_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = resources.User.to_json(resources.User()) - req.return_value.content = return_value - request = service.UpdateUserRequest() + request = service.DeleteUserRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = resources.User() - post_with_metadata.return_value = resources.User(), metadata - client.update_user( + client.delete_user( request, metadata=[ ("key", "val"), @@ -30297,18 +31040,14 @@ def 
test_update_user_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): +def test_list_databases_rest_bad_request(request_type=service.ListDatabasesRequest): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -30323,47 +31062,51 @@ def test_delete_user_rest_bad_request(request_type=service.DeleteUserRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_user(request) + client.list_databases(request) @pytest.mark.parametrize( "request_type", [ - service.DeleteUserRequest, + service.ListDatabasesRequest, dict, ], ) -def test_delete_user_rest_call_success(request_type): +def test_list_databases_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/clusters/sample3/users/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = service.ListDatabasesResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = service.ListDatabasesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_user(request) + response = client.list_databases(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListDatabasesPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_user_rest_interceptors(null_interceptor): +def test_list_databases_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -30377,10 +31120,16 @@ def test_delete_user_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_delete_user" + transports.AlloyDBAdminRestInterceptor, "post_list_databases" + ) as post, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "post_list_databases_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.AlloyDBAdminRestInterceptor, "pre_list_databases" ) as pre: pre.assert_not_called() - pb_message = service.DeleteUserRequest.pb(service.DeleteUserRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) transcode.return_value = { "method": "post", "uri": 
"my_uri", @@ -30391,15 +31140,21 @@ def test_delete_user_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = service.ListDatabasesResponse.to_json( + service.ListDatabasesResponse() + ) + req.return_value.content = return_value - request = service.DeleteUserRequest() + request = service.ListDatabasesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = service.ListDatabasesResponse() + post_with_metadata.return_value = service.ListDatabasesResponse(), metadata - client.delete_user( + client.list_databases( request, metadata=[ ("key", "val"), @@ -30408,9 +31163,11 @@ def test_delete_user_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_list_databases_rest_bad_request(request_type=service.ListDatabasesRequest): +def test_create_database_rest_bad_request(request_type=service.CreateDatabaseRequest): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -30430,30 +31187,112 @@ def test_list_databases_rest_bad_request(request_type=service.ListDatabasesReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_databases(request) + client.create_database(request) @pytest.mark.parametrize( "request_type", [ - service.ListDatabasesRequest, + service.CreateDatabaseRequest, dict, ], ) -def test_list_databases_rest_call_success(request_type): +def test_create_database_rest_call_success(request_type): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2/clusters/sample3"} + 
request_init["database"] = { + "name": "name_value", + "charset": "charset_value", + "collation": "collation_value", + "character_type": "character_type_value", + "is_template": True, + "database_template": "database_template_value", + "is_template_database": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = service.CreateDatabaseRequest.meta.fields["database"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["database"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + 
result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["database"][field])): + del request_init["database"][field][i][subfield] + else: + del request_init["database"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = service.ListDatabasesResponse( - next_page_token="next_page_token_value", + return_value = resources.Database( + name="name_value", + charset="charset_value", + collation="collation_value", + character_type="character_type_value", + is_template=True, + database_template="database_template_value", + is_template_database=True, ) # Wrap the value into a proper Response obj @@ -30461,20 +31300,26 @@ def test_list_databases_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = service.ListDatabasesResponse.pb(return_value) + return_value = resources.Database.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_databases(request) + response = client.create_database(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListDatabasesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, resources.Database) + assert response.name == "name_value" + assert response.charset == "charset_value" + assert response.collation == "collation_value" + assert response.character_type == "character_type_value" + assert response.is_template is True + assert response.database_template == "database_template_value" + assert response.is_template_database is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_databases_rest_interceptors(null_interceptor): +def test_create_database_rest_interceptors(null_interceptor): transport = transports.AlloyDBAdminRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -30488,16 +31333,16 @@ def test_list_databases_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_databases" + transports.AlloyDBAdminRestInterceptor, "post_create_database" ) as post, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "post_list_databases_with_metadata" + transports.AlloyDBAdminRestInterceptor, "post_create_database_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.AlloyDBAdminRestInterceptor, "pre_list_databases" + transports.AlloyDBAdminRestInterceptor, "pre_create_database" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = service.ListDatabasesRequest.pb(service.ListDatabasesRequest()) + pb_message = service.CreateDatabaseRequest.pb(service.CreateDatabaseRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -30508,21 +31353,19 @@ def test_list_databases_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} - return_value = service.ListDatabasesResponse.to_json( - service.ListDatabasesResponse() - ) + return_value = resources.Database.to_json(resources.Database()) req.return_value.content = return_value - request = service.ListDatabasesRequest() + request = service.CreateDatabaseRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = service.ListDatabasesResponse() - post_with_metadata.return_value = service.ListDatabasesResponse(), metadata + post.return_value = resources.Database() + post_with_metadata.return_value = resources.Database(), metadata - client.list_databases( + client.create_database( request, metadata=[ ("key", "val"), @@ -31666,6 +32509,26 @@ def test_list_databases_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_database_empty_call_rest(): + client = AlloyDBAdminClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_database), "__call__") as call: + client.create_database(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = service.CreateDatabaseRequest() + + assert args[0] == request_msg + + def test_alloy_db_admin_rest_lro_client(): client = AlloyDBAdminClient( credentials=ga_credentials.AnonymousCredentials(), @@ -31753,6 +32616,7 @@ def test_alloy_db_admin_base_transport(): "update_user", "delete_user", "list_databases", + "create_database", "get_location", "list_locations", "get_operation", @@ -32130,6 +32994,9 @@ def test_alloy_db_admin_client_transport_session_collision(transport_name): session1 = client1.transport.list_databases._session session2 = client2.transport.list_databases._session assert session1 != session2 + session1 = client1.transport.create_database._session + session2 = client2.transport.create_database._session + assert session1 != session2 def test_alloy_db_admin_grpc_transport_channel(): @@ -32367,12 +33234,41 @@ def test_parse_connection_info_path(): assert expected == actual -def test_crypto_key_version_path(): +def test_crypto_key_path(): project = "winkle" location = "nautilus" key_ring = "scallop" crypto_key = "abalone" - crypto_key_version = "squid" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = AlloyDBAdminClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "squid", + "location": "clam", + "key_ring": "whelk", + "crypto_key": "octopus", + } + path = AlloyDBAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "oyster" + location = "nudibranch" + key_ring = "cuttlefish" + crypto_key = "mussel" + crypto_key_version = "winkle" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -32388,11 +33284,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "clam", - "location": "whelk", - "key_ring": "octopus", - "crypto_key": "oyster", - "crypto_key_version": "nudibranch", + "project": "nautilus", + "location": "scallop", + "key_ring": "abalone", + "crypto_key": "squid", + "crypto_key_version": "clam", } path = AlloyDBAdminClient.crypto_key_version_path(**expected) @@ -32402,10 +33298,10 @@ def test_parse_crypto_key_version_path(): def test_database_path(): - project = "cuttlefish" - location = "mussel" - cluster = "winkle" - database = "nautilus" + project = "whelk" + location = "octopus" + cluster = "oyster" + database = "nudibranch" expected = "projects/{project}/locations/{location}/clusters/{cluster}/databases/{database}".format( project=project, location=location, @@ -32418,10 +33314,10 @@ def test_database_path(): def test_parse_database_path(): expected = { - "project": "scallop", - "location": "abalone", - "cluster": "squid", - "database": "clam", + "project": "cuttlefish", + "location": "mussel", + "cluster": "winkle", + "database": "nautilus", } path = AlloyDBAdminClient.database_path(**expected) @@ -32431,10 +33327,10 @@ def test_parse_database_path(): def test_instance_path(): - project = "whelk" - location = "octopus" - cluster = "oyster" - instance = "nudibranch" + project = "scallop" + location = "abalone" + cluster = "squid" + instance = "clam" expected = "projects/{project}/locations/{location}/clusters/{cluster}/instances/{instance}".format( 
project=project, location=location, @@ -32447,10 +33343,10 @@ def test_instance_path(): def test_parse_instance_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "cluster": "winkle", - "instance": "nautilus", + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "instance": "nudibranch", } path = AlloyDBAdminClient.instance_path(**expected) @@ -32460,8 +33356,8 @@ def test_parse_instance_path(): def test_network_path(): - project = "scallop" - network = "abalone" + project = "cuttlefish" + network = "mussel" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -32472,8 +33368,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "squid", - "network": "clam", + "project": "winkle", + "network": "nautilus", } path = AlloyDBAdminClient.network_path(**expected) @@ -32482,10 +33378,38 @@ def test_parse_network_path(): assert expected == actual +def test_service_attachment_path(): + project = "scallop" + region = "abalone" + service_attachment = "squid" + expected = "projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( + project=project, + region=region, + service_attachment=service_attachment, + ) + actual = AlloyDBAdminClient.service_attachment_path( + project, region, service_attachment + ) + assert expected == actual + + +def test_parse_service_attachment_path(): + expected = { + "project": "clam", + "region": "whelk", + "service_attachment": "octopus", + } + path = AlloyDBAdminClient.service_attachment_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBAdminClient.parse_service_attachment_path(path) + assert expected == actual + + def test_supported_database_flag_path(): - project = "whelk" - location = "octopus" - flag = "oyster" + project = "oyster" + location = "nudibranch" + flag = "cuttlefish" expected = "projects/{project}/locations/{location}/flags/{flag}".format( project=project, location=location, @@ -32497,9 +33421,9 @@ def test_supported_database_flag_path(): def test_parse_supported_database_flag_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "flag": "mussel", + "project": "mussel", + "location": "winkle", + "flag": "nautilus", } path = AlloyDBAdminClient.supported_database_flag_path(**expected) @@ -32509,10 +33433,10 @@ def test_parse_supported_database_flag_path(): def test_user_path(): - project = "winkle" - location = "nautilus" - cluster = "scallop" - user = "abalone" + project = "scallop" + location = "abalone" + cluster = "squid" + user = "clam" expected = "projects/{project}/locations/{location}/clusters/{cluster}/users/{user}".format( project=project, location=location, @@ -32525,10 +33449,10 @@ def test_user_path(): def test_parse_user_path(): expected = { - "project": "squid", - "location": "clam", - "cluster": "whelk", - "user": "octopus", + "project": "whelk", + "location": "octopus", + "cluster": "oyster", + "user": "nudibranch", } path = AlloyDBAdminClient.user_path(**expected) @@ -32538,7 +33462,7 @@ def test_parse_user_path(): def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -32548,7 +33472,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "mussel", } path = AlloyDBAdminClient.common_billing_account_path(**expected) @@ -32558,7 +33482,7 @@ def test_parse_common_billing_account_path(): 
def test_common_folder_path(): - folder = "cuttlefish" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -32568,7 +33492,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "nautilus", } path = AlloyDBAdminClient.common_folder_path(**expected) @@ -32578,7 +33502,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "winkle" + organization = "scallop" expected = "organizations/{organization}".format( organization=organization, ) @@ -32588,7 +33512,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "abalone", } path = AlloyDBAdminClient.common_organization_path(**expected) @@ -32598,7 +33522,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "scallop" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -32608,7 +33532,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "clam", } path = AlloyDBAdminClient.common_project_path(**expected) @@ -32618,8 +33542,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "squid" - location = "clam" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -32630,8 +33554,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "oyster", + "location": "nudibranch", } path = AlloyDBAdminClient.common_location_path(**expected) diff --git a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_dbcsql_admin.py b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_dbcsql_admin.py index b8d51794444d..d659a36634c7 100644 
--- a/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_dbcsql_admin.py +++ b/packages/google-cloud-alloydb/tests/unit/gapic/alloydb_v1beta/test_alloy_dbcsql_admin.py @@ -2956,12 +2956,43 @@ def test_parse_cluster_path(): assert expected == actual -def test_crypto_key_version_path(): +def test_crypto_key_path(): project = "squid" location = "clam" key_ring = "whelk" crypto_key = "octopus" - crypto_key_version = "oyster" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = AlloyDBCSQLAdminClient.crypto_key_path( + project, location, key_ring, crypto_key + ) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = AlloyDBCSQLAdminClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlloyDBCSQLAdminClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_crypto_key_version_path(): + project = "winkle" + location = "nautilus" + key_ring = "scallop" + crypto_key = "abalone" + crypto_key_version = "squid" expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{crypto_key_version}".format( project=project, location=location, @@ -2977,11 +3008,11 @@ def test_crypto_key_version_path(): def test_parse_crypto_key_version_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "key_ring": "mussel", - "crypto_key": "winkle", - "crypto_key_version": "nautilus", + "project": "clam", + "location": "whelk", + "key_ring": "octopus", + "crypto_key": "oyster", + "crypto_key_version": "nudibranch", } path = AlloyDBCSQLAdminClient.crypto_key_version_path(**expected) @@ -2991,8 +3022,8 @@ def test_parse_crypto_key_version_path(): def test_network_path(): - project = "scallop" - network = "abalone" + project = "cuttlefish" + network = "mussel" expected = "projects/{project}/global/networks/{network}".format( project=project, network=network, @@ -3003,8 +3034,8 @@ def test_network_path(): def test_parse_network_path(): expected = { - "project": "squid", - "network": "clam", + "project": "winkle", + "network": "nautilus", } path = AlloyDBCSQLAdminClient.network_path(**expected) @@ -3014,7 +3045,7 @@ def test_parse_network_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3024,7 +3055,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "abalone", } path = AlloyDBCSQLAdminClient.common_billing_account_path(**expected) @@ -3034,7 +3065,7 @@ def test_parse_common_billing_account_path(): def 
test_common_folder_path(): - folder = "oyster" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -3044,7 +3075,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "clam", } path = AlloyDBCSQLAdminClient.common_folder_path(**expected) @@ -3054,7 +3085,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -3064,7 +3095,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "octopus", } path = AlloyDBCSQLAdminClient.common_organization_path(**expected) @@ -3074,7 +3105,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -3084,7 +3115,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "nudibranch", } path = AlloyDBCSQLAdminClient.common_project_path(**expected) @@ -3094,8 +3125,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -3106,8 +3137,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "winkle", + "location": "nautilus", } path = AlloyDBCSQLAdminClient.common_location_path(**expected) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/__init__.py index 9978135e2bfb..c9f9d8d6e8f6 
100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/__init__.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation/__init__.py @@ -30,25 +30,32 @@ CapacityCommitment, CreateAssignmentRequest, CreateCapacityCommitmentRequest, + CreateReservationGroupRequest, CreateReservationRequest, DeleteAssignmentRequest, DeleteCapacityCommitmentRequest, + DeleteReservationGroupRequest, DeleteReservationRequest, Edition, FailoverMode, FailoverReservationRequest, GetBiReservationRequest, GetCapacityCommitmentRequest, + GetReservationGroupRequest, GetReservationRequest, ListAssignmentsRequest, ListAssignmentsResponse, ListCapacityCommitmentsRequest, ListCapacityCommitmentsResponse, + ListReservationGroupsRequest, + ListReservationGroupsResponse, ListReservationsRequest, ListReservationsResponse, MergeCapacityCommitmentsRequest, MoveAssignmentRequest, Reservation, + ReservationGroup, + SchedulingPolicy, SearchAllAssignmentsRequest, SearchAllAssignmentsResponse, SearchAssignmentsRequest, @@ -70,23 +77,30 @@ "CapacityCommitment", "CreateAssignmentRequest", "CreateCapacityCommitmentRequest", + "CreateReservationGroupRequest", "CreateReservationRequest", "DeleteAssignmentRequest", "DeleteCapacityCommitmentRequest", + "DeleteReservationGroupRequest", "DeleteReservationRequest", "FailoverReservationRequest", "GetBiReservationRequest", "GetCapacityCommitmentRequest", + "GetReservationGroupRequest", "GetReservationRequest", "ListAssignmentsRequest", "ListAssignmentsResponse", "ListCapacityCommitmentsRequest", "ListCapacityCommitmentsResponse", + "ListReservationGroupsRequest", + "ListReservationGroupsResponse", "ListReservationsRequest", "ListReservationsResponse", "MergeCapacityCommitmentsRequest", "MoveAssignmentRequest", "Reservation", + "ReservationGroup", + "SchedulingPolicy", "SearchAllAssignmentsRequest", "SearchAllAssignmentsResponse", "SearchAssignmentsRequest", diff --git 
a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/__init__.py index 3f0402d98d56..5d423369dc78 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/__init__.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/__init__.py @@ -28,25 +28,32 @@ CapacityCommitment, CreateAssignmentRequest, CreateCapacityCommitmentRequest, + CreateReservationGroupRequest, CreateReservationRequest, DeleteAssignmentRequest, DeleteCapacityCommitmentRequest, + DeleteReservationGroupRequest, DeleteReservationRequest, Edition, FailoverMode, FailoverReservationRequest, GetBiReservationRequest, GetCapacityCommitmentRequest, + GetReservationGroupRequest, GetReservationRequest, ListAssignmentsRequest, ListAssignmentsResponse, ListCapacityCommitmentsRequest, ListCapacityCommitmentsResponse, + ListReservationGroupsRequest, + ListReservationGroupsResponse, ListReservationsRequest, ListReservationsResponse, MergeCapacityCommitmentsRequest, MoveAssignmentRequest, Reservation, + ReservationGroup, + SchedulingPolicy, SearchAllAssignmentsRequest, SearchAllAssignmentsResponse, SearchAssignmentsRequest, @@ -67,26 +74,33 @@ "CapacityCommitment", "CreateAssignmentRequest", "CreateCapacityCommitmentRequest", + "CreateReservationGroupRequest", "CreateReservationRequest", "DeleteAssignmentRequest", "DeleteCapacityCommitmentRequest", + "DeleteReservationGroupRequest", "DeleteReservationRequest", "Edition", "FailoverMode", "FailoverReservationRequest", "GetBiReservationRequest", "GetCapacityCommitmentRequest", + "GetReservationGroupRequest", "GetReservationRequest", "ListAssignmentsRequest", "ListAssignmentsResponse", "ListCapacityCommitmentsRequest", "ListCapacityCommitmentsResponse", + "ListReservationGroupsRequest", + "ListReservationGroupsResponse", "ListReservationsRequest", "ListReservationsResponse", 
"MergeCapacityCommitmentsRequest", "MoveAssignmentRequest", "Reservation", + "ReservationGroup", "ReservationServiceClient", + "SchedulingPolicy", "SearchAllAssignmentsRequest", "SearchAllAssignmentsResponse", "SearchAssignmentsRequest", diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_metadata.json b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_metadata.json index 385c3265ef89..7216bd76bb22 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_metadata.json +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/gapic_metadata.json @@ -25,6 +25,11 @@ "create_reservation" ] }, + "CreateReservationGroup": { + "methods": [ + "create_reservation_group" + ] + }, "DeleteAssignment": { "methods": [ "delete_assignment" @@ -40,6 +45,11 @@ "delete_reservation" ] }, + "DeleteReservationGroup": { + "methods": [ + "delete_reservation_group" + ] + }, "FailoverReservation": { "methods": [ "failover_reservation" @@ -55,11 +65,21 @@ "get_capacity_commitment" ] }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, "GetReservation": { "methods": [ "get_reservation" ] }, + "GetReservationGroup": { + "methods": [ + "get_reservation_group" + ] + }, "ListAssignments": { "methods": [ "list_assignments" @@ -70,6 +90,11 @@ "list_capacity_commitments" ] }, + "ListReservationGroups": { + "methods": [ + "list_reservation_groups" + ] + }, "ListReservations": { "methods": [ "list_reservations" @@ -95,11 +120,21 @@ "search_assignments" ] }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, "SplitCapacityCommitment": { "methods": [ "split_capacity_commitment" ] }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, "UpdateAssignment": { "methods": [ "update_assignment" @@ -140,6 +175,11 @@ "create_reservation" ] }, + "CreateReservationGroup": { + "methods": [ + 
"create_reservation_group" + ] + }, "DeleteAssignment": { "methods": [ "delete_assignment" @@ -155,6 +195,11 @@ "delete_reservation" ] }, + "DeleteReservationGroup": { + "methods": [ + "delete_reservation_group" + ] + }, "FailoverReservation": { "methods": [ "failover_reservation" @@ -170,11 +215,21 @@ "get_capacity_commitment" ] }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, "GetReservation": { "methods": [ "get_reservation" ] }, + "GetReservationGroup": { + "methods": [ + "get_reservation_group" + ] + }, "ListAssignments": { "methods": [ "list_assignments" @@ -185,6 +240,11 @@ "list_capacity_commitments" ] }, + "ListReservationGroups": { + "methods": [ + "list_reservation_groups" + ] + }, "ListReservations": { "methods": [ "list_reservations" @@ -210,11 +270,21 @@ "search_assignments" ] }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, "SplitCapacityCommitment": { "methods": [ "split_capacity_commitment" ] }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, "UpdateAssignment": { "methods": [ "update_assignment" @@ -255,6 +325,11 @@ "create_reservation" ] }, + "CreateReservationGroup": { + "methods": [ + "create_reservation_group" + ] + }, "DeleteAssignment": { "methods": [ "delete_assignment" @@ -270,6 +345,11 @@ "delete_reservation" ] }, + "DeleteReservationGroup": { + "methods": [ + "delete_reservation_group" + ] + }, "FailoverReservation": { "methods": [ "failover_reservation" @@ -285,11 +365,21 @@ "get_capacity_commitment" ] }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, "GetReservation": { "methods": [ "get_reservation" ] }, + "GetReservationGroup": { + "methods": [ + "get_reservation_group" + ] + }, "ListAssignments": { "methods": [ "list_assignments" @@ -300,6 +390,11 @@ "list_capacity_commitments" ] }, + "ListReservationGroups": { + "methods": [ + "list_reservation_groups" + ] + }, "ListReservations": { "methods": [ "list_reservations" @@ -325,11 +420,21 @@ 
"search_assignments" ] }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, "SplitCapacityCommitment": { "methods": [ "split_capacity_commitment" ] }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, "UpdateAssignment": { "methods": [ "update_assignment" diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py index ca32eed92ed5..f398519c989f 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/async_client.py @@ -45,6 +45,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -111,6 +113,12 @@ class ReservationServiceAsyncClient: parse_reservation_path = staticmethod( ReservationServiceClient.parse_reservation_path ) + reservation_group_path = staticmethod( + ReservationServiceClient.reservation_group_path + ) + parse_reservation_group_path = staticmethod( + ReservationServiceClient.parse_reservation_group_path + ) common_billing_account_path = staticmethod( ReservationServiceClient.common_billing_account_path ) @@ -3189,6 +3197,836 @@ async def sample_update_bi_reservation(): # Done; return the response. 
return response + async def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Reservations + - ReservationAssignments + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.getIamPolicy`` to get + policies on reservations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_get_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]]): + The request object. Request message for ``GetIamPolicy`` method. 
+ resource (:class:`str`): + REQUIRED: The resource for which the + policy is being requested. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_iam_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Reservations + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.setIamPolicy`` to set + policies on reservations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_set_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]]): + The request object. Request message for ``SetIamPolicy`` method. + resource (:class:`str`): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). 
A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_iam_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Gets your permissions on a resource. Returns an empty + set of permissions if the resource doesn't exist. + + Supported resources are: + + - Reservations + + No Google IAM permissions are required to call this + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + async def sample_test_iam_permissions(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_reservation_group( + self, + request: Optional[ + Union[reservation.CreateReservationGroupRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.ReservationGroup: + r"""Creates a new reservation group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_create_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateReservationGroupRequest( + parent="parent_value", + reservation_group_id="reservation_group_id_value", + ) + + # Make the request + response = await client.create_reservation_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.CreateReservationGroupRequest, dict]]): + The request object. The request for + [ReservationService.CreateReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservationGroup]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.bigquery_reservation_v1.types.ReservationGroup: + A reservation group is a container + for reservations. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, reservation.CreateReservationGroupRequest): + request = reservation.CreateReservationGroupRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_reservation_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_reservation_group( + self, + request: Optional[Union[reservation.GetReservationGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.ReservationGroup: + r"""Returns information about the reservation group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_get_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetReservationGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_reservation_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.GetReservationGroupRequest, dict]]): + The request object. 
The request for + [ReservationService.GetReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.GetReservationGroup]. + name (:class:`str`): + Required. Resource name of the reservation group to + retrieve. E.g., + ``projects/myproject/locations/US/reservationGroups/team1-prod`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.bigquery_reservation_v1.types.ReservationGroup: + A reservation group is a container + for reservations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, reservation.GetReservationGroupRequest): + request = reservation.GetReservationGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_reservation_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_reservation_group( + self, + request: Optional[ + Union[reservation.DeleteReservationGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a reservation. Returns + ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has + assignments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_delete_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.DeleteReservationGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_reservation_group(request=request) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.DeleteReservationGroupRequest, dict]]): + The request object. 
The request for + [ReservationService.DeleteReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservationGroup]. + name (:class:`str`): + Required. Resource name of the reservation group to + retrieve. E.g., + ``projects/myproject/locations/US/reservationGroups/team1-prod`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, reservation.DeleteReservationGroupRequest): + request = reservation.DeleteReservationGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_reservation_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_reservation_groups( + self, + request: Optional[Union[reservation.ListReservationGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListReservationGroupsAsyncPager: + r"""Lists all the reservation groups for the project in + the specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + async def sample_list_reservation_groups(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListReservationGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reservation_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.bigquery_reservation_v1.types.ListReservationGroupsRequest, dict]]): + The request object. The request for + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. + parent (:class:`str`): + Required. The parent resource name containing project + and location, e.g.: ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationGroupsAsyncPager: + The response for + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, reservation.ListReservationGroupsRequest): + request = reservation.ListReservationGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_reservation_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListReservationGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "ReservationServiceAsyncClient": return self diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py index f231c2c3bf50..25b4924c96d9 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/client.py @@ -61,6 +61,8 @@ _LOGGER = std_logging.getLogger(__name__) +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -307,6 +309,28 @@ def parse_reservation_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def reservation_group_path( + project: str, + location: str, + reservation_group: str, + ) -> str: + """Returns a fully-qualified reservation_group string.""" + return "projects/{project}/locations/{location}/reservationGroups/{reservation_group}".format( + project=project, + location=location, + reservation_group=reservation_group, + ) + + @staticmethod + def parse_reservation_group_path(path: str) -> Dict[str, str]: + """Parses a reservation_group path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/reservationGroups/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -3613,6 +3637,826 @@ def sample_update_bi_reservation(): # Done; return the response. 
return response + def get_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.GetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the access control policy for a resource. May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Reservations + - ReservationAssignments + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.getIamPolicy`` to get + policies on reservations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_get_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.GetIamPolicyRequest, dict]): + The request object. Request message for ``GetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being requested. 
See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: Optional[Union[iam_policy_pb2.SetIamPolicyRequest, dict]] = None, + *, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Reservations + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.setIamPolicy`` to set + policies on reservations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_set_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.SetIamPolicyRequest, dict]): + The request object. Request message for ``SetIamPolicy`` method. + resource (str): + REQUIRED: The resource for which the + policy is being specified. See the + operation documentation for the + appropriate value for this field. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). 
A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy_pb2.SetIamPolicyRequest() + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[Union[iam_policy_pb2.TestIamPermissionsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Gets your permissions on a resource. Returns an empty + set of permissions if the resource doesn't exist. + + Supported resources are: + + - Reservations + + No Google IAM permissions are required to call this + method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + from google.iam.v1 import iam_policy_pb2 # type: ignore + + def sample_test_iam_permissions(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest, dict]): + The request object. Request message for ``TestIamPermissions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. + """ + # Create or coerce a protobuf request object. + if isinstance(request, dict): + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_reservation_group( + self, + request: Optional[ + Union[reservation.CreateReservationGroupRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.ReservationGroup: + r"""Creates a new reservation group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_create_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateReservationGroupRequest( + parent="parent_value", + reservation_group_id="reservation_group_id_value", + ) + + # Make the request + response = client.create_reservation_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.CreateReservationGroupRequest, dict]): + The request object. The request for + [ReservationService.CreateReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservationGroup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.bigquery_reservation_v1.types.ReservationGroup: + A reservation group is a container + for reservations. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, reservation.CreateReservationGroupRequest): + request = reservation.CreateReservationGroupRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_reservation_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_reservation_group( + self, + request: Optional[Union[reservation.GetReservationGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.ReservationGroup: + r"""Returns information about the reservation group. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_get_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetReservationGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_reservation_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.GetReservationGroupRequest, dict]): + The request object. 
The request for + [ReservationService.GetReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.GetReservationGroup]. + name (str): + Required. Resource name of the reservation group to + retrieve. E.g., + ``projects/myproject/locations/US/reservationGroups/team1-prod`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.bigquery_reservation_v1.types.ReservationGroup: + A reservation group is a container + for reservations. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, reservation.GetReservationGroupRequest): + request = reservation.GetReservationGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_reservation_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_reservation_group( + self, + request: Optional[ + Union[reservation.DeleteReservationGroupRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a reservation. Returns + ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has + assignments. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_delete_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.DeleteReservationGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_reservation_group(request=request) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.DeleteReservationGroupRequest, dict]): + The request object. 
The request for + [ReservationService.DeleteReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservationGroup]. + name (str): + Required. Resource name of the reservation group to + retrieve. E.g., + ``projects/myproject/locations/US/reservationGroups/team1-prod`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, reservation.DeleteReservationGroupRequest): + request = reservation.DeleteReservationGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_reservation_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_reservation_groups( + self, + request: Optional[Union[reservation.ListReservationGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListReservationGroupsPager: + r"""Lists all the reservation groups for the project in + the specified location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import bigquery_reservation_v1 + + def sample_list_reservation_groups(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListReservationGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reservation_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.bigquery_reservation_v1.types.ListReservationGroupsRequest, dict]): + The request object. 
The request for + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. + parent (str): + Required. The parent resource name containing project + and location, e.g.: ``projects/myproject/locations/US`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationGroupsPager: + The response for + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, reservation.ListReservationGroupsRequest): + request = reservation.ListReservationGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_reservation_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListReservationGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ReservationServiceClient": return self diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py index aee5c7b808d5..8627ddb401f8 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/pagers.py @@ -819,3 +819,159 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListReservationGroupsPager: + """A pager for iterating through ``list_reservation_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListReservationGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``reservation_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListReservationGroups`` requests and continue to iterate + through the ``reservation_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListReservationGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., reservation.ListReservationGroupsResponse], + request: reservation.ListReservationGroupsRequest, + response: reservation.ListReservationGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListReservationGroupsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListReservationGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = reservation.ListReservationGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[reservation.ListReservationGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[reservation.ReservationGroup]: + for page in self.pages: + yield from page.reservation_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListReservationGroupsAsyncPager: + """A pager for iterating through ``list_reservation_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.bigquery_reservation_v1.types.ListReservationGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``reservation_groups`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListReservationGroups`` requests and continue to iterate + through the ``reservation_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.bigquery_reservation_v1.types.ListReservationGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[reservation.ListReservationGroupsResponse]], + request: reservation.ListReservationGroupsRequest, + response: reservation.ListReservationGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.bigquery_reservation_v1.types.ListReservationGroupsRequest): + The initial request object. + response (google.cloud.bigquery_reservation_v1.types.ListReservationGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = reservation.ListReservationGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[reservation.ListReservationGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[reservation.ReservationGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.reservation_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py index 87d865fa6372..4149670c19de 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/base.py @@ -22,6 +22,8 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import google.protobuf from google.protobuf import empty_pb2 # type: ignore @@ -347,6 +349,41 @@ 
def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.create_reservation_group: gapic_v1.method.wrap_method( + self.create_reservation_group, + default_timeout=None, + client_info=client_info, + ), + self.get_reservation_group: gapic_v1.method.wrap_method( + self.get_reservation_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_reservation_group: gapic_v1.method.wrap_method( + self.delete_reservation_group, + default_timeout=None, + client_info=client_info, + ), + self.list_reservation_groups: gapic_v1.method.wrap_method( + self.list_reservation_groups, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -582,6 +619,75 @@ def update_bi_reservation( ]: raise NotImplementedError() + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_reservation_group( + self, + ) -> Callable[ + [reservation.CreateReservationGroupRequest], + 
Union[reservation.ReservationGroup, Awaitable[reservation.ReservationGroup]], + ]: + raise NotImplementedError() + + @property + def get_reservation_group( + self, + ) -> Callable[ + [reservation.GetReservationGroupRequest], + Union[reservation.ReservationGroup, Awaitable[reservation.ReservationGroup]], + ]: + raise NotImplementedError() + + @property + def delete_reservation_group( + self, + ) -> Callable[ + [reservation.DeleteReservationGroupRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def list_reservation_groups( + self, + ) -> Callable[ + [reservation.ListReservationGroupsRequest], + Union[ + reservation.ListReservationGroupsResponse, + Awaitable[reservation.ListReservationGroupsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py index 901cc5ccbcef..48cefe4365f6 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc.py @@ -23,6 +23,8 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message @@ -1111,6 +1113,236 @@ def update_bi_reservation( ) return self._stubs["update_bi_reservation"] + 
@property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Reservations + - ReservationAssignments + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.getIamPolicy`` to get + policies on reservations. + + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Reservations + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.setIamPolicy`` to set + policies on reservations. + + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Gets your permissions on a resource. Returns an empty + set of permissions if the resource doesn't exist. + + Supported resources are: + + - Reservations + + No Google IAM permissions are required to call this + method. + + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def create_reservation_group( + self, + ) -> Callable[ + [reservation.CreateReservationGroupRequest], reservation.ReservationGroup + ]: + r"""Return a callable for the create reservation group method over gRPC. + + Creates a new reservation group. + + Returns: + Callable[[~.CreateReservationGroupRequest], + ~.ReservationGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_reservation_group" not in self._stubs: + self._stubs["create_reservation_group"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/CreateReservationGroup", + request_serializer=reservation.CreateReservationGroupRequest.serialize, + response_deserializer=reservation.ReservationGroup.deserialize, + ) + return self._stubs["create_reservation_group"] + + @property + def get_reservation_group( + self, + ) -> Callable[ + [reservation.GetReservationGroupRequest], reservation.ReservationGroup + ]: + r"""Return a callable for the get reservation group method over gRPC. + + Returns information about the reservation group. + + Returns: + Callable[[~.GetReservationGroupRequest], + ~.ReservationGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_reservation_group" not in self._stubs: + self._stubs["get_reservation_group"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/GetReservationGroup", + request_serializer=reservation.GetReservationGroupRequest.serialize, + response_deserializer=reservation.ReservationGroup.deserialize, + ) + return self._stubs["get_reservation_group"] + + @property + def delete_reservation_group( + self, + ) -> Callable[[reservation.DeleteReservationGroupRequest], empty_pb2.Empty]: + r"""Return a callable for the delete reservation group method over gRPC. + + Deletes a reservation. Returns + ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has + assignments. + + Returns: + Callable[[~.DeleteReservationGroupRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_reservation_group" not in self._stubs: + self._stubs["delete_reservation_group"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteReservationGroup", + request_serializer=reservation.DeleteReservationGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_reservation_group"] + + @property + def list_reservation_groups( + self, + ) -> Callable[ + [reservation.ListReservationGroupsRequest], + reservation.ListReservationGroupsResponse, + ]: + r"""Return a callable for the list reservation groups method over gRPC. + + Lists all the reservation groups for the project in + the specified location. 
+ + Returns: + Callable[[~.ListReservationGroupsRequest], + ~.ListReservationGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_reservation_groups" not in self._stubs: + self._stubs["list_reservation_groups"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/ListReservationGroups", + request_serializer=reservation.ListReservationGroupsRequest.serialize, + response_deserializer=reservation.ListReservationGroupsResponse.deserialize, + ) + return self._stubs["list_reservation_groups"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py index f779e288e885..adfda3b912d0 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/grpc_asyncio.py @@ -25,6 +25,8 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf.json_format import MessageToJson import google.protobuf.message @@ -1144,6 +1146,240 @@ def update_bi_reservation( ) return self._stubs["update_bi_reservation"] + 
@property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + + Gets the access control policy for a resource. May return: + + - A\ ``NOT_FOUND`` error if the resource doesn't exist or you + don't have the permission to view it. + - An empty policy if the resource exists but doesn't have a set + policy. + + Supported resources are: + + - Reservations + - ReservationAssignments + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.getIamPolicy`` to get + policies on reservations. + + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + + Sets an access control policy for a resource. Replaces any + existing policy. + + Supported resources are: + + - Reservations + + To call this method, you must have the following Google IAM + permissions: + + - ``bigqueryreservation.reservations.setIamPolicy`` to set + policies on reservations. 
+ + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + + Gets your permissions on a resource. Returns an empty + set of permissions if the resource doesn't exist. + + Supported resources are: + + - Reservations + + No Google IAM permissions are required to call this + method. + + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def create_reservation_group( + self, + ) -> Callable[ + [reservation.CreateReservationGroupRequest], + Awaitable[reservation.ReservationGroup], + ]: + r"""Return a callable for the create reservation group method over gRPC. + + Creates a new reservation group. + + Returns: + Callable[[~.CreateReservationGroupRequest], + Awaitable[~.ReservationGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_reservation_group" not in self._stubs: + self._stubs["create_reservation_group"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/CreateReservationGroup", + request_serializer=reservation.CreateReservationGroupRequest.serialize, + response_deserializer=reservation.ReservationGroup.deserialize, + ) + return self._stubs["create_reservation_group"] + + @property + def get_reservation_group( + self, + ) -> Callable[ + [reservation.GetReservationGroupRequest], + Awaitable[reservation.ReservationGroup], + ]: + r"""Return a callable for the get reservation group method over gRPC. + + Returns information about the reservation group. + + Returns: + Callable[[~.GetReservationGroupRequest], + Awaitable[~.ReservationGroup]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_reservation_group" not in self._stubs: + self._stubs["get_reservation_group"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/GetReservationGroup", + request_serializer=reservation.GetReservationGroupRequest.serialize, + response_deserializer=reservation.ReservationGroup.deserialize, + ) + return self._stubs["get_reservation_group"] + + @property + def delete_reservation_group( + self, + ) -> Callable[ + [reservation.DeleteReservationGroupRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete reservation group method over gRPC. + + Deletes a reservation. Returns + ``google.rpc.Code.FAILED_PRECONDITION`` when reservation has + assignments. + + Returns: + Callable[[~.DeleteReservationGroupRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_reservation_group" not in self._stubs: + self._stubs["delete_reservation_group"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/DeleteReservationGroup", + request_serializer=reservation.DeleteReservationGroupRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_reservation_group"] + + @property + def list_reservation_groups( + self, + ) -> Callable[ + [reservation.ListReservationGroupsRequest], + Awaitable[reservation.ListReservationGroupsResponse], + ]: + r"""Return a callable for the list reservation groups method over gRPC. 
+ + Lists all the reservation groups for the project in + the specified location. + + Returns: + Callable[[~.ListReservationGroupsRequest], + Awaitable[~.ListReservationGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_reservation_groups" not in self._stubs: + self._stubs["list_reservation_groups"] = self._logged_channel.unary_unary( + "/google.cloud.bigquery.reservation.v1.ReservationService/ListReservationGroups", + request_serializer=reservation.ListReservationGroupsRequest.serialize, + response_deserializer=reservation.ListReservationGroupsResponse.deserialize, + ) + return self._stubs["list_reservation_groups"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1357,6 +1593,41 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.create_reservation_group: self._wrap_method( + self.create_reservation_group, + default_timeout=None, + client_info=client_info, + ), + self.get_reservation_group: self._wrap_method( + self.get_reservation_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_reservation_group: self._wrap_method( + self.delete_reservation_group, + default_timeout=None, + client_info=client_info, + ), + self.list_reservation_groups: 
self._wrap_method( + self.list_reservation_groups, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py index 132d7126b60e..3a17ba8ef4b7 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest.py @@ -24,6 +24,8 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore import google.protobuf from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format @@ -98,6 +100,14 @@ def post_create_reservation(self, response): logging.log(f"Received response: {response}") return response + def pre_create_reservation_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_reservation_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_assignment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -110,6 +120,10 @@ def pre_delete_reservation(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_reservation_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_failover_reservation(self, request, 
metadata): logging.log(f"Received request: {request}") return request, metadata @@ -134,6 +148,14 @@ def post_get_capacity_commitment(self, response): logging.log(f"Received response: {response}") return response + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_reservation(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -142,6 +164,14 @@ def post_get_reservation(self, response): logging.log(f"Received response: {response}") return response + def pre_get_reservation_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_reservation_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_assignments(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -158,6 +188,14 @@ def post_list_capacity_commitments(self, response): logging.log(f"Received response: {response}") return response + def pre_list_reservation_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_reservation_groups(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_reservations(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -198,6 +236,14 @@ def post_search_assignments(self, response): logging.log(f"Received response: {response}") return response + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + def pre_split_capacity_commitment(self, request, 
metadata): logging.log(f"Received request: {request}") return request, metadata @@ -206,6 +252,14 @@ def post_split_capacity_commitment(self, response): logging.log(f"Received response: {response}") return response + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_assignment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -390,6 +444,55 @@ def post_create_reservation_with_metadata( """ return response, metadata + def pre_create_reservation_group( + self, + request: reservation.CreateReservationGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.CreateReservationGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_reservation_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_create_reservation_group( + self, response: reservation.ReservationGroup + ) -> reservation.ReservationGroup: + """Post-rpc interceptor for create_reservation_group + + DEPRECATED. Please use the `post_create_reservation_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. This `post_create_reservation_group` interceptor runs + before the `post_create_reservation_group_with_metadata` interceptor. 
+ """ + return response + + def post_create_reservation_group_with_metadata( + self, + response: reservation.ReservationGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.ReservationGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_reservation_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_create_reservation_group_with_metadata` + interceptor in new development instead of the `post_create_reservation_group` interceptor. + When both interceptors are used, this `post_create_reservation_group_with_metadata` interceptor runs after the + `post_create_reservation_group` interceptor. The (possibly modified) response returned by + `post_create_reservation_group` will be passed to + `post_create_reservation_group_with_metadata`. + """ + return response, metadata + def pre_delete_assignment( self, request: reservation.DeleteAssignmentRequest, @@ -433,6 +536,21 @@ def pre_delete_reservation( """ return request, metadata + def pre_delete_reservation_group( + self, + request: reservation.DeleteReservationGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.DeleteReservationGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_reservation_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. 
+ """ + return request, metadata + def pre_failover_reservation( self, request: reservation.FailoverReservationRequest, @@ -578,6 +696,52 @@ def post_get_capacity_commitment_with_metadata( """ return response, metadata + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. 
The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. + """ + return response, metadata + def pre_get_reservation( self, request: reservation.GetReservationRequest, @@ -626,6 +790,54 @@ def post_get_reservation_with_metadata( """ return response, metadata + def pre_get_reservation_group( + self, + request: reservation.GetReservationGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.GetReservationGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_reservation_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_get_reservation_group( + self, response: reservation.ReservationGroup + ) -> reservation.ReservationGroup: + """Post-rpc interceptor for get_reservation_group + + DEPRECATED. Please use the `post_get_reservation_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. This `post_get_reservation_group` interceptor runs + before the `post_get_reservation_group_with_metadata` interceptor. + """ + return response + + def post_get_reservation_group_with_metadata( + self, + response: reservation.ReservationGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[reservation.ReservationGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_reservation_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_get_reservation_group_with_metadata` + interceptor in new development instead of the `post_get_reservation_group` interceptor. 
+ When both interceptors are used, this `post_get_reservation_group_with_metadata` interceptor runs after the + `post_get_reservation_group` interceptor. The (possibly modified) response returned by + `post_get_reservation_group` will be passed to + `post_get_reservation_group_with_metadata`. + """ + return response, metadata + def pre_list_assignments( self, request: reservation.ListAssignmentsRequest, @@ -728,6 +940,58 @@ def post_list_capacity_commitments_with_metadata( """ return response, metadata + def pre_list_reservation_groups( + self, + request: reservation.ListReservationGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.ListReservationGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_reservation_groups + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_list_reservation_groups( + self, response: reservation.ListReservationGroupsResponse + ) -> reservation.ListReservationGroupsResponse: + """Post-rpc interceptor for list_reservation_groups + + DEPRECATED. Please use the `post_list_reservation_groups_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. This `post_list_reservation_groups` interceptor runs + before the `post_list_reservation_groups_with_metadata` interceptor. 
+ """ + return response + + def post_list_reservation_groups_with_metadata( + self, + response: reservation.ListReservationGroupsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + reservation.ListReservationGroupsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_reservation_groups + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_list_reservation_groups_with_metadata` + interceptor in new development instead of the `post_list_reservation_groups` interceptor. + When both interceptors are used, this `post_list_reservation_groups_with_metadata` interceptor runs after the + `post_list_reservation_groups` interceptor. The (possibly modified) response returned by + `post_list_reservation_groups` will be passed to + `post_list_reservation_groups_with_metadata`. + """ + return response, metadata + def pre_list_reservations( self, request: reservation.ListReservationsRequest, @@ -976,6 +1240,52 @@ def post_search_assignments_with_metadata( """ return response, metadata + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata( + self, + response: policy_pb2.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[policy_pb2.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. + """ + return response, metadata + def pre_split_capacity_commitment( self, request: reservation.SplitCapacityCommitmentRequest, @@ -1028,6 +1338,58 @@ def post_split_capacity_commitment_with_metadata( """ return response, metadata + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReservationService server. 
+ """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ReservationService server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata( + self, + response: iam_policy_pb2.TestIamPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReservationService server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + def pre_update_assignment( self, request: reservation.UpdateAssignmentRequest, @@ -1810,12 +2172,12 @@ def __call__( ) return resp - class _DeleteAssignment( - _BaseReservationServiceRestTransport._BaseDeleteAssignment, + class _CreateReservationGroup( + _BaseReservationServiceRestTransport._BaseCreateReservationGroup, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.DeleteAssignment") + return hash("ReservationServiceRestTransport.CreateReservationGroup") @staticmethod def _get_response( @@ -1836,25 +2198,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: reservation.DeleteAssignmentRequest, + request: reservation.CreateReservationGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the delete assignment method over HTTP. + ) -> reservation.ReservationGroup: + r"""Call the create reservation group method over HTTP. Args: - request (~.reservation.DeleteAssignmentRequest): + request (~.reservation.CreateReservationGroupRequest): The request object. The request for - [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment]. - Note: "bigquery.reservationAssignments.delete" - permission is required on the related assignee. + [ReservationService.CreateReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservationGroup]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1862,21 +2223,31 @@ def __call__( sent along with the request as metadata. 
Normally, each value must be of type `str`, but for metadata keys ending with the suffix `-bin`, the corresponding values must be of type `bytes`. + + Returns: + ~.reservation.ReservationGroup: + A reservation group is a container + for reservations. + """ http_options = ( - _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_http_options() + _BaseReservationServiceRestTransport._BaseCreateReservationGroup._get_http_options() ) - request, metadata = self._interceptor.pre_delete_assignment( + request, metadata = self._interceptor.pre_create_reservation_group( request, metadata ) - transcoded_request = _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_transcoded_request( + transcoded_request = _BaseReservationServiceRestTransport._BaseCreateReservationGroup._get_transcoded_request( http_options, request ) + body = _BaseReservationServiceRestTransport._BaseCreateReservationGroup._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseCreateReservationGroup._get_query_params_json( transcoded_request ) @@ -1888,7 +2259,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = json_format.MessageToJson(request) + request_payload = type(request).to_json(request) except: request_payload = None http_request = { @@ -1898,26 +2269,175 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.DeleteAssignment", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.CreateReservationGroup", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "DeleteAssignment", + "rpcName": "CreateReservationGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - 
response = ReservationServiceRestTransport._DeleteAssignment._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + response = ( + ReservationServiceRestTransport._CreateReservationGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.ReservationGroup() + pb_resp = reservation.ReservationGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_reservation_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_reservation_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = reservation.ReservationGroup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.create_reservation_group", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "CreateReservationGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteAssignment( + _BaseReservationServiceRestTransport._BaseDeleteAssignment, + ReservationServiceRestStub, + ): + def __hash__(self): + return hash("ReservationServiceRestTransport.DeleteAssignment") + + 
@staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: reservation.DeleteAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete assignment method over HTTP. + + Args: + request (~.reservation.DeleteAssignmentRequest): + The request object. The request for + [ReservationService.DeleteAssignment][google.cloud.bigquery.reservation.v1.ReservationService.DeleteAssignment]. + Note: "bigquery.reservationAssignments.delete" + permission is required on the related assignee. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_assignment( + request, metadata + ) + transcoded_request = _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseReservationServiceRestTransport._BaseDeleteAssignment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.DeleteAssignment", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "DeleteAssignment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ReservationServiceRestTransport._DeleteAssignment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: raise core_exceptions.from_http_response(response) @@ -2145,6 +2665,118 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _DeleteReservationGroup( + _BaseReservationServiceRestTransport._BaseDeleteReservationGroup, + ReservationServiceRestStub, + ): + def __hash__(self): + return hash("ReservationServiceRestTransport.DeleteReservationGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: reservation.DeleteReservationGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete reservation group method over HTTP. + + Args: + request (~.reservation.DeleteReservationGroupRequest): + The request object. The request for + [ReservationService.DeleteReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservationGroup]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseReservationServiceRestTransport._BaseDeleteReservationGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_reservation_group( + request, metadata + ) + transcoded_request = _BaseReservationServiceRestTransport._BaseDeleteReservationGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseReservationServiceRestTransport._BaseDeleteReservationGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.DeleteReservationGroup", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "DeleteReservationGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + ReservationServiceRestTransport._DeleteReservationGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + class _FailoverReservation( _BaseReservationServiceRestTransport._BaseFailoverReservation, ReservationServiceRestStub, @@ -2615,12 +3247,12 @@ def __call__( ) return resp - class _GetReservation( - _BaseReservationServiceRestTransport._BaseGetReservation, + class _GetIamPolicy( + _BaseReservationServiceRestTransport._BaseGetIamPolicy, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.GetReservation") + return hash("ReservationServiceRestTransport.GetIamPolicy") @staticmethod def _get_response( @@ -2646,18 +3278,17 @@ def _get_response( def __call__( self, - request: reservation.GetReservationRequest, + request: iam_policy_pb2.GetIamPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Reservation: - r"""Call the get reservation method over HTTP. + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. Args: - request (~.reservation.GetReservationRequest): - The request object. The request for - [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. + request (~.iam_policy_pb2.GetIamPolicyRequest): + The request object. Request message for ``GetIamPolicy`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2667,23 +3298,96 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.Reservation: - A reservation is a mechanism used to - guarantee slots to users. + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. 
Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
""" http_options = ( - _BaseReservationServiceRestTransport._BaseGetReservation._get_http_options() + _BaseReservationServiceRestTransport._BaseGetIamPolicy._get_http_options() ) - request, metadata = self._interceptor.pre_get_reservation(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseGetReservation._get_transcoded_request( + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseReservationServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseGetReservation._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseGetIamPolicy._get_query_params_json( transcoded_request ) @@ -2695,7 +3399,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -2705,17 +3409,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.GetReservation", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.GetIamPolicy", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetReservation", + "rpcName": "GetIamPolicy", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ReservationServiceRestTransport._GetReservation._get_response( + response = ReservationServiceRestTransport._GetIamPolicy._get_response( self._host, metadata, query_params, @@ -2730,21 +3434,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.Reservation() - pb_resp = reservation.Reservation.pb(resp) + resp = policy_pb2.Policy() + pb_resp = resp 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_reservation(resp) + resp = self._interceptor.post_get_iam_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_reservation_with_metadata( + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = reservation.Reservation.to_json(response) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -2753,22 +3457,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.get_reservation", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.get_iam_policy", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "GetReservation", + "rpcName": "GetIamPolicy", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListAssignments( - _BaseReservationServiceRestTransport._BaseListAssignments, + class _GetReservation( + _BaseReservationServiceRestTransport._BaseGetReservation, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.ListAssignments") + return hash("ReservationServiceRestTransport.GetReservation") @staticmethod def _get_response( @@ -2794,18 +3498,18 @@ def _get_response( def __call__( self, - request: reservation.ListAssignmentsRequest, + request: reservation.GetReservationRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.ListAssignmentsResponse: - r"""Call the list assignments method over HTTP. 
+ ) -> reservation.Reservation: + r"""Call the get reservation method over HTTP. Args: - request (~.reservation.ListAssignmentsRequest): + request (~.reservation.GetReservationRequest): The request object. The request for - [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + [ReservationService.GetReservation][google.cloud.bigquery.reservation.v1.ReservationService.GetReservation]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2815,25 +3519,23 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.ListAssignmentsResponse: - The response for - [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. + ~.reservation.Reservation: + A reservation is a mechanism used to + guarantee slots to users. """ http_options = ( - _BaseReservationServiceRestTransport._BaseListAssignments._get_http_options() + _BaseReservationServiceRestTransport._BaseGetReservation._get_http_options() ) - request, metadata = self._interceptor.pre_list_assignments( - request, metadata - ) - transcoded_request = _BaseReservationServiceRestTransport._BaseListAssignments._get_transcoded_request( + request, metadata = self._interceptor.pre_get_reservation(request, metadata) + transcoded_request = _BaseReservationServiceRestTransport._BaseGetReservation._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseListAssignments._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseGetReservation._get_query_params_json( transcoded_request ) @@ -2855,17 +3557,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListAssignments", + f"Sending request for 
google.cloud.bigquery.reservation_v1.ReservationServiceClient.GetReservation", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListAssignments", + "rpcName": "GetReservation", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ReservationServiceRestTransport._ListAssignments._get_response( + response = ReservationServiceRestTransport._GetReservation._get_response( self._host, metadata, query_params, @@ -2880,23 +3582,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.ListAssignmentsResponse() - pb_resp = reservation.ListAssignmentsResponse.pb(resp) + resp = reservation.Reservation() + pb_resp = reservation.Reservation.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_assignments(resp) + resp = self._interceptor.post_get_reservation(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_assignments_with_metadata( + resp, _ = self._interceptor.post_get_reservation_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = reservation.ListAssignmentsResponse.to_json( - response - ) + response_payload = reservation.Reservation.to_json(response) except: response_payload = None http_response = { @@ -2905,22 +3605,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_assignments", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.get_reservation", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListAssignments", + "rpcName": "GetReservation", "metadata": http_response["headers"], "httpResponse": 
http_response, }, ) return resp - class _ListCapacityCommitments( - _BaseReservationServiceRestTransport._BaseListCapacityCommitments, + class _GetReservationGroup( + _BaseReservationServiceRestTransport._BaseGetReservationGroup, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.ListCapacityCommitments") + return hash("ReservationServiceRestTransport.GetReservationGroup") @staticmethod def _get_response( @@ -2946,18 +3646,18 @@ def _get_response( def __call__( self, - request: reservation.ListCapacityCommitmentsRequest, + request: reservation.GetReservationGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.ListCapacityCommitmentsResponse: - r"""Call the list capacity commitments method over HTTP. + ) -> reservation.ReservationGroup: + r"""Call the get reservation group method over HTTP. Args: - request (~.reservation.ListCapacityCommitmentsRequest): + request (~.reservation.GetReservationGroupRequest): The request object. The request for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + [ReservationService.GetReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.GetReservationGroup]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2967,25 +3667,25 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.ListCapacityCommitmentsResponse: - The response for - [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + ~.reservation.ReservationGroup: + A reservation group is a container + for reservations. 
""" http_options = ( - _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_http_options() + _BaseReservationServiceRestTransport._BaseGetReservationGroup._get_http_options() ) - request, metadata = self._interceptor.pre_list_capacity_commitments( + request, metadata = self._interceptor.pre_get_reservation_group( request, metadata ) - transcoded_request = _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_transcoded_request( + transcoded_request = _BaseReservationServiceRestTransport._BaseGetReservationGroup._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseGetReservationGroup._get_query_params_json( transcoded_request ) @@ -3007,10 +3707,10 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListCapacityCommitments", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.GetReservationGroup", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListCapacityCommitments", + "rpcName": "GetReservationGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, @@ -3018,7 +3718,7 @@ def __call__( # Send the request response = ( - ReservationServiceRestTransport._ListCapacityCommitments._get_response( + ReservationServiceRestTransport._GetReservationGroup._get_response( self._host, metadata, query_params, @@ -3034,23 +3734,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.ListCapacityCommitmentsResponse() - pb_resp = reservation.ListCapacityCommitmentsResponse.pb(resp) + resp = reservation.ReservationGroup() + pb_resp = reservation.ReservationGroup.pb(resp) json_format.Parse(response.content, 
pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_capacity_commitments(resp) + resp = self._interceptor.post_get_reservation_group(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_capacity_commitments_with_metadata( + resp, _ = self._interceptor.post_get_reservation_group_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = ( - reservation.ListCapacityCommitmentsResponse.to_json(response) - ) + response_payload = reservation.ReservationGroup.to_json(response) except: response_payload = None http_response = { @@ -3059,22 +3757,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_capacity_commitments", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.get_reservation_group", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListCapacityCommitments", + "rpcName": "GetReservationGroup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ListReservations( - _BaseReservationServiceRestTransport._BaseListReservations, + class _ListAssignments( + _BaseReservationServiceRestTransport._BaseListAssignments, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.ListReservations") + return hash("ReservationServiceRestTransport.ListAssignments") @staticmethod def _get_response( @@ -3100,18 +3798,18 @@ def _get_response( def __call__( self, - request: reservation.ListReservationsRequest, + request: reservation.ListAssignmentsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> 
reservation.ListReservationsResponse: - r"""Call the list reservations method over HTTP. + ) -> reservation.ListAssignmentsResponse: + r"""Call the list assignments method over HTTP. Args: - request (~.reservation.ListReservationsRequest): + request (~.reservation.ListAssignmentsRequest): The request object. The request for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3121,25 +3819,25 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.ListReservationsResponse: + ~.reservation.ListAssignmentsResponse: The response for - [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + [ReservationService.ListAssignments][google.cloud.bigquery.reservation.v1.ReservationService.ListAssignments]. 
""" http_options = ( - _BaseReservationServiceRestTransport._BaseListReservations._get_http_options() + _BaseReservationServiceRestTransport._BaseListAssignments._get_http_options() ) - request, metadata = self._interceptor.pre_list_reservations( + request, metadata = self._interceptor.pre_list_assignments( request, metadata ) - transcoded_request = _BaseReservationServiceRestTransport._BaseListReservations._get_transcoded_request( + transcoded_request = _BaseReservationServiceRestTransport._BaseListAssignments._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseListReservations._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseListAssignments._get_query_params_json( transcoded_request ) @@ -3161,17 +3859,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListReservations", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListAssignments", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListReservations", + "rpcName": "ListAssignments", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ReservationServiceRestTransport._ListReservations._get_response( + response = ReservationServiceRestTransport._ListAssignments._get_response( self._host, metadata, query_params, @@ -3186,21 +3884,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.ListReservationsResponse() - pb_resp = reservation.ListReservationsResponse.pb(resp) + resp = reservation.ListAssignmentsResponse() + pb_resp = reservation.ListAssignmentsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_reservations(resp) + resp = 
self._interceptor.post_list_assignments(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_list_reservations_with_metadata( + resp, _ = self._interceptor.post_list_assignments_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = reservation.ListReservationsResponse.to_json( + response_payload = reservation.ListAssignmentsResponse.to_json( response ) except: @@ -3211,22 +3909,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_reservations", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_assignments", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "ListReservations", + "rpcName": "ListAssignments", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _MergeCapacityCommitments( - _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments, + class _ListCapacityCommitments( + _BaseReservationServiceRestTransport._BaseListCapacityCommitments, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.MergeCapacityCommitments") + return hash("ReservationServiceRestTransport.ListCapacityCommitments") @staticmethod def _get_response( @@ -3247,69 +3945,51 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: reservation.MergeCapacityCommitmentsRequest, + request: reservation.ListCapacityCommitmentsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.CapacityCommitment: - r"""Call the 
merge capacity - commitments method over HTTP. - - Args: - request (~.reservation.MergeCapacityCommitmentsRequest): - The request object. The request for - [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - ~.reservation.CapacityCommitment: - Capacity commitment is a way to - purchase compute capacity for BigQuery - jobs (in the form of slots) with some - committed period of usage. Annual - commitments renew by default. - Commitments can be removed after their - commitment end time passes. + ) -> reservation.ListCapacityCommitmentsResponse: + r"""Call the list capacity commitments method over HTTP. - In order to remove annual commitment, - its plan needs to be changed to monthly - or flex first. + Args: + request (~.reservation.ListCapacityCommitmentsRequest): + The request object. The request for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - A capacity commitment resource exists as - a child resource of the admin project. 
+ Returns: + ~.reservation.ListCapacityCommitmentsResponse: + The response for + [ReservationService.ListCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.ListCapacityCommitments]. """ http_options = ( - _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_http_options() + _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_http_options() ) - request, metadata = self._interceptor.pre_merge_capacity_commitments( + request, metadata = self._interceptor.pre_list_capacity_commitments( request, metadata ) - transcoded_request = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_transcoded_request( + transcoded_request = _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_transcoded_request( http_options, request ) - body = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_request_body_json( - transcoded_request - ) - # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseListCapacityCommitments._get_query_params_json( transcoded_request ) @@ -3331,10 +4011,10 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.MergeCapacityCommitments", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListCapacityCommitments", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MergeCapacityCommitments", + "rpcName": "ListCapacityCommitments", "httpRequest": http_request, "metadata": http_request["headers"], }, @@ -3342,14 +4022,13 @@ def __call__( # Send the request response = ( - ReservationServiceRestTransport._MergeCapacityCommitments._get_response( + ReservationServiceRestTransport._ListCapacityCommitments._get_response( self._host, metadata, 
query_params, self._session, timeout, transcoded_request, - body, ) ) @@ -3359,21 +4038,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.CapacityCommitment() - pb_resp = reservation.CapacityCommitment.pb(resp) + resp = reservation.ListCapacityCommitmentsResponse() + pb_resp = reservation.ListCapacityCommitmentsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_merge_capacity_commitments(resp) + resp = self._interceptor.post_list_capacity_commitments(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_merge_capacity_commitments_with_metadata( + resp, _ = self._interceptor.post_list_capacity_commitments_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = reservation.CapacityCommitment.to_json(response) + response_payload = ( + reservation.ListCapacityCommitmentsResponse.to_json(response) + ) except: response_payload = None http_response = { @@ -3382,22 +4063,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.merge_capacity_commitments", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_capacity_commitments", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MergeCapacityCommitments", + "rpcName": "ListCapacityCommitments", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _MoveAssignment( - _BaseReservationServiceRestTransport._BaseMoveAssignment, + class _ListReservationGroups( + _BaseReservationServiceRestTransport._BaseListReservationGroups, ReservationServiceRestStub, ): def __hash__(self): - return 
hash("ReservationServiceRestTransport.MoveAssignment") + return hash("ReservationServiceRestTransport.ListReservationGroups") @staticmethod def _get_response( @@ -3418,31 +4099,820 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: reservation.MoveAssignmentRequest, + request: reservation.ListReservationGroupsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.Assignment: - r"""Call the move assignment method over HTTP. + ) -> reservation.ListReservationGroupsResponse: + r"""Call the list reservation groups method over HTTP. Args: - request (~.reservation.MoveAssignmentRequest): + request (~.reservation.ListReservationGroupsRequest): The request object. The request for - [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. - - **Note**: "bigquery.reservationAssignments.create" - permission is required on the destination_id. + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.reservation.ListReservationGroupsResponse: + The response for + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. 
+ + """ + + http_options = ( + _BaseReservationServiceRestTransport._BaseListReservationGroups._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_reservation_groups( + request, metadata + ) + transcoded_request = _BaseReservationServiceRestTransport._BaseListReservationGroups._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseReservationServiceRestTransport._BaseListReservationGroups._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListReservationGroups", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "ListReservationGroups", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + ReservationServiceRestTransport._ListReservationGroups._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.ListReservationGroupsResponse() + pb_resp = reservation.ListReservationGroupsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_reservation_groups(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_reservation_groups_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + reservation.ListReservationGroupsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_reservation_groups", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "ListReservationGroups", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListReservations( + _BaseReservationServiceRestTransport._BaseListReservations, + ReservationServiceRestStub, + ): + def __hash__(self): + return hash("ReservationServiceRestTransport.ListReservations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + 
request: reservation.ListReservationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.ListReservationsResponse: + r"""Call the list reservations method over HTTP. + + Args: + request (~.reservation.ListReservationsRequest): + The request object. The request for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.reservation.ListReservationsResponse: + The response for + [ReservationService.ListReservations][google.cloud.bigquery.reservation.v1.ReservationService.ListReservations]. 
+ + """ + + http_options = ( + _BaseReservationServiceRestTransport._BaseListReservations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_reservations( + request, metadata + ) + transcoded_request = _BaseReservationServiceRestTransport._BaseListReservations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseReservationServiceRestTransport._BaseListReservations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.ListReservations", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "ListReservations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ReservationServiceRestTransport._ListReservations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.ListReservationsResponse() + pb_resp = reservation.ListReservationsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_reservations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_reservations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = reservation.ListReservationsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.list_reservations", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "ListReservations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _MergeCapacityCommitments( + _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments, + ReservationServiceRestStub, + ): + def __hash__(self): + return hash("ReservationServiceRestTransport.MergeCapacityCommitments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
reservation.MergeCapacityCommitmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.CapacityCommitment: + r"""Call the merge capacity + commitments method over HTTP. + + Args: + request (~.reservation.MergeCapacityCommitmentsRequest): + The request object. The request for + [ReservationService.MergeCapacityCommitments][google.cloud.bigquery.reservation.v1.ReservationService.MergeCapacityCommitments]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.reservation.CapacityCommitment: + Capacity commitment is a way to + purchase compute capacity for BigQuery + jobs (in the form of slots) with some + committed period of usage. Annual + commitments renew by default. + Commitments can be removed after their + commitment end time passes. + + In order to remove annual commitment, + its plan needs to be changed to monthly + or flex first. + + A capacity commitment resource exists as + a child resource of the admin project. 
+ + """ + + http_options = ( + _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_http_options() + ) + + request, metadata = self._interceptor.pre_merge_capacity_commitments( + request, metadata + ) + transcoded_request = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_transcoded_request( + http_options, request + ) + + body = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseReservationServiceRestTransport._BaseMergeCapacityCommitments._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.MergeCapacityCommitments", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "MergeCapacityCommitments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + ReservationServiceRestTransport._MergeCapacityCommitments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.CapacityCommitment() + pb_resp = reservation.CapacityCommitment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_merge_capacity_commitments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_merge_capacity_commitments_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = reservation.CapacityCommitment.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.merge_capacity_commitments", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "MergeCapacityCommitments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _MoveAssignment( + _BaseReservationServiceRestTransport._BaseMoveAssignment, + ReservationServiceRestStub, + ): + def __hash__(self): + return hash("ReservationServiceRestTransport.MoveAssignment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
reservation.MoveAssignmentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.Assignment: + r"""Call the move assignment method over HTTP. + + Args: + request (~.reservation.MoveAssignmentRequest): + The request object. The request for + [ReservationService.MoveAssignment][google.cloud.bigquery.reservation.v1.ReservationService.MoveAssignment]. + + **Note**: "bigquery.reservationAssignments.create" + permission is required on the destination_id. + + **Note**: "bigquery.reservationAssignments.create" and + "bigquery.reservationAssignments.delete" permission are + required on the related assignee. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.reservation.Assignment: + An assignment allows a project to + submit jobs of a certain type using + slots from the specified reservation. 
+ + """ + + http_options = ( + _BaseReservationServiceRestTransport._BaseMoveAssignment._get_http_options() + ) + + request, metadata = self._interceptor.pre_move_assignment(request, metadata) + transcoded_request = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_transcoded_request( + http_options, request + ) + + body = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.MoveAssignment", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "MoveAssignment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ReservationServiceRestTransport._MoveAssignment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.Assignment() + pb_resp = reservation.Assignment.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_move_assignment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_move_assignment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = reservation.Assignment.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.move_assignment", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "MoveAssignment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SearchAllAssignments( + _BaseReservationServiceRestTransport._BaseSearchAllAssignments, + ReservationServiceRestStub, + ): + def __hash__(self): + return hash("ReservationServiceRestTransport.SearchAllAssignments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: reservation.SearchAllAssignmentsRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.SearchAllAssignmentsResponse: + r"""Call the search all assignments method over HTTP. + + Args: + request (~.reservation.SearchAllAssignmentsRequest): + The request object. The request for + [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. + Note: "bigquery.reservationAssignments.search" + permission is required on the related assignee. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.reservation.SearchAllAssignmentsResponse: + The response for + [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. 
+ + """ + + http_options = ( + _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_http_options() + ) + + request, metadata = self._interceptor.pre_search_all_assignments( + request, metadata + ) + transcoded_request = _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SearchAllAssignments", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "SearchAllAssignments", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + ReservationServiceRestTransport._SearchAllAssignments._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = reservation.SearchAllAssignmentsResponse() + pb_resp = reservation.SearchAllAssignmentsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_search_all_assignments(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_assignments_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = reservation.SearchAllAssignmentsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.search_all_assignments", + extra={ + "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", + "rpcName": "SearchAllAssignments", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SearchAssignments( + _BaseReservationServiceRestTransport._BaseSearchAssignments, + ReservationServiceRestStub, + ): + def __hash__(self): + return hash("ReservationServiceRestTransport.SearchAssignments") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: 
reservation.SearchAssignmentsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> reservation.SearchAssignmentsResponse: + r"""Call the search assignments method over HTTP. - **Note**: "bigquery.reservationAssignments.create" and - "bigquery.reservationAssignments.delete" permission are - required on the related assignee. + Args: + request (~.reservation.SearchAssignmentsRequest): + The request object. The request for + [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. + Note: "bigquery.reservationAssignments.search" + permission is required on the related assignee. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3452,28 +4922,25 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.Assignment: - An assignment allows a project to - submit jobs of a certain type using - slots from the specified reservation. + ~.reservation.SearchAssignmentsResponse: + The response for + [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. 
""" http_options = ( - _BaseReservationServiceRestTransport._BaseMoveAssignment._get_http_options() + _BaseReservationServiceRestTransport._BaseSearchAssignments._get_http_options() ) - request, metadata = self._interceptor.pre_move_assignment(request, metadata) - transcoded_request = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_transcoded_request( - http_options, request + request, metadata = self._interceptor.pre_search_assignments( + request, metadata ) - - body = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_request_body_json( - transcoded_request + transcoded_request = _BaseReservationServiceRestTransport._BaseSearchAssignments._get_transcoded_request( + http_options, request ) # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseMoveAssignment._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseSearchAssignments._get_query_params_json( transcoded_request ) @@ -3495,24 +4962,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.MoveAssignment", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SearchAssignments", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MoveAssignment", + "rpcName": "SearchAssignments", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ReservationServiceRestTransport._MoveAssignment._get_response( + response = ReservationServiceRestTransport._SearchAssignments._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3521,21 +4987,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.Assignment() - pb_resp = 
reservation.Assignment.pb(resp) + resp = reservation.SearchAssignmentsResponse() + pb_resp = reservation.SearchAssignmentsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_move_assignment(resp) + resp = self._interceptor.post_search_assignments(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_move_assignment_with_metadata( + resp, _ = self._interceptor.post_search_assignments_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = reservation.Assignment.to_json(response) + response_payload = reservation.SearchAssignmentsResponse.to_json( + response + ) except: response_payload = None http_response = { @@ -3544,22 +5012,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.move_assignment", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.search_assignments", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "MoveAssignment", + "rpcName": "SearchAssignments", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _SearchAllAssignments( - _BaseReservationServiceRestTransport._BaseSearchAllAssignments, + class _SetIamPolicy( + _BaseReservationServiceRestTransport._BaseSetIamPolicy, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.SearchAllAssignments") + return hash("ReservationServiceRestTransport.SetIamPolicy") @staticmethod def _get_response( @@ -3580,25 +5048,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: 
reservation.SearchAllAssignmentsRequest, + request: iam_policy_pb2.SetIamPolicyRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.SearchAllAssignmentsResponse: - r"""Call the search all assignments method over HTTP. + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. Args: - request (~.reservation.SearchAllAssignmentsRequest): - The request object. The request for - [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. - Note: "bigquery.reservationAssignments.search" - permission is required on the related assignee. + request (~.iam_policy_pb2.SetIamPolicyRequest): + The request object. Request message for ``SetIamPolicy`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3608,25 +5074,100 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.SearchAllAssignmentsResponse: - The response for - [ReservationService.SearchAllAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAllAssignments]. + ~.policy_pb2.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. 
A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
""" http_options = ( - _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_http_options() + _BaseReservationServiceRestTransport._BaseSetIamPolicy._get_http_options() ) - request, metadata = self._interceptor.pre_search_all_assignments( - request, metadata - ) - transcoded_request = _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_transcoded_request( + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseReservationServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( http_options, request ) + body = _BaseReservationServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseSearchAllAssignments._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseSetIamPolicy._get_query_params_json( transcoded_request ) @@ -3638,7 +5179,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -3648,25 +5189,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SearchAllAssignments", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SetIamPolicy", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAllAssignments", + "rpcName": "SetIamPolicy", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ( - ReservationServiceRestTransport._SearchAllAssignments._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - ) + response = ReservationServiceRestTransport._SetIamPolicy._get_response( + self._host, + 
metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3675,23 +5215,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.SearchAllAssignmentsResponse() - pb_resp = reservation.SearchAllAssignmentsResponse.pb(resp) + resp = policy_pb2.Policy() + pb_resp = resp json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_search_all_assignments(resp) + resp = self._interceptor.post_set_iam_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_all_assignments_with_metadata( + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = reservation.SearchAllAssignmentsResponse.to_json( - response - ) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -3700,22 +5238,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.search_all_assignments", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.set_iam_policy", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAllAssignments", + "rpcName": "SetIamPolicy", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _SearchAssignments( - _BaseReservationServiceRestTransport._BaseSearchAssignments, + class _SplitCapacityCommitment( + _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.SearchAssignments") 
+ return hash("ReservationServiceRestTransport.SplitCapacityCommitment") @staticmethod def _get_response( @@ -3736,25 +5274,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: reservation.SearchAssignmentsRequest, + request: reservation.SplitCapacityCommitmentRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.SearchAssignmentsResponse: - r"""Call the search assignments method over HTTP. + ) -> reservation.SplitCapacityCommitmentResponse: + r"""Call the split capacity commitment method over HTTP. Args: - request (~.reservation.SearchAssignmentsRequest): + request (~.reservation.SplitCapacityCommitmentRequest): The request object. The request for - [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. - Note: "bigquery.reservationAssignments.search" - permission is required on the related assignee. + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3764,25 +5301,29 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.SearchAssignmentsResponse: + ~.reservation.SplitCapacityCommitmentResponse: The response for - [ReservationService.SearchAssignments][google.cloud.bigquery.reservation.v1.ReservationService.SearchAssignments]. + [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. 
""" http_options = ( - _BaseReservationServiceRestTransport._BaseSearchAssignments._get_http_options() + _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_http_options() ) - request, metadata = self._interceptor.pre_search_assignments( + request, metadata = self._interceptor.pre_split_capacity_commitment( request, metadata ) - transcoded_request = _BaseReservationServiceRestTransport._BaseSearchAssignments._get_transcoded_request( + transcoded_request = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_transcoded_request( http_options, request ) + body = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_request_body_json( + transcoded_request + ) + # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseSearchAssignments._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_query_params_json( transcoded_request ) @@ -3804,23 +5345,26 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SearchAssignments", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SplitCapacityCommitment", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAssignments", + "rpcName": "SplitCapacityCommitment", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ReservationServiceRestTransport._SearchAssignments._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, + response = ( + ReservationServiceRestTransport._SplitCapacityCommitment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3829,22 +5373,22 @@ 
def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.SearchAssignmentsResponse() - pb_resp = reservation.SearchAssignmentsResponse.pb(resp) + resp = reservation.SplitCapacityCommitmentResponse() + pb_resp = reservation.SplitCapacityCommitmentResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_search_assignments(resp) + resp = self._interceptor.post_split_capacity_commitment(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_search_assignments_with_metadata( + resp, _ = self._interceptor.post_split_capacity_commitment_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = reservation.SearchAssignmentsResponse.to_json( - response + response_payload = ( + reservation.SplitCapacityCommitmentResponse.to_json(response) ) except: response_payload = None @@ -3854,22 +5398,22 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.search_assignments", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.split_capacity_commitment", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SearchAssignments", + "rpcName": "SplitCapacityCommitment", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _SplitCapacityCommitment( - _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment, + class _TestIamPermissions( + _BaseReservationServiceRestTransport._BaseTestIamPermissions, ReservationServiceRestStub, ): def __hash__(self): - return hash("ReservationServiceRestTransport.SplitCapacityCommitment") + return hash("ReservationServiceRestTransport.TestIamPermissions") @staticmethod 
def _get_response( @@ -3896,18 +5440,17 @@ def _get_response( def __call__( self, - request: reservation.SplitCapacityCommitmentRequest, + request: iam_policy_pb2.TestIamPermissionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> reservation.SplitCapacityCommitmentResponse: - r"""Call the split capacity commitment method over HTTP. + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. Args: - request (~.reservation.SplitCapacityCommitmentRequest): - The request object. The request for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. + request (~.iam_policy_pb2.TestIamPermissionsRequest): + The request object. Request message for ``TestIamPermissions`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3917,29 +5460,27 @@ def __call__( be of type `bytes`. Returns: - ~.reservation.SplitCapacityCommitmentResponse: - The response for - [ReservationService.SplitCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.SplitCapacityCommitment]. - + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. 
""" http_options = ( - _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_http_options() + _BaseReservationServiceRestTransport._BaseTestIamPermissions._get_http_options() ) - request, metadata = self._interceptor.pre_split_capacity_commitment( + request, metadata = self._interceptor.pre_test_iam_permissions( request, metadata ) - transcoded_request = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_transcoded_request( + transcoded_request = _BaseReservationServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( http_options, request ) - body = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_request_body_json( + body = _BaseReservationServiceRestTransport._BaseTestIamPermissions._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = _BaseReservationServiceRestTransport._BaseSplitCapacityCommitment._get_query_params_json( + query_params = _BaseReservationServiceRestTransport._BaseTestIamPermissions._get_query_params_json( transcoded_request ) @@ -3951,7 +5492,7 @@ def __call__( ) method = transcoded_request["method"] try: - request_payload = type(request).to_json(request) + request_payload = json_format.MessageToJson(request) except: request_payload = None http_request = { @@ -3961,10 +5502,10 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.SplitCapacityCommitment", + f"Sending request for google.cloud.bigquery.reservation_v1.ReservationServiceClient.TestIamPermissions", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SplitCapacityCommitment", + "rpcName": "TestIamPermissions", "httpRequest": http_request, "metadata": http_request["headers"], }, @@ -3972,7 +5513,7 @@ def __call__( # Send the request response = ( - ReservationServiceRestTransport._SplitCapacityCommitment._get_response( + 
ReservationServiceRestTransport._TestIamPermissions._get_response( self._host, metadata, query_params, @@ -3989,23 +5530,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = reservation.SplitCapacityCommitmentResponse() - pb_resp = reservation.SplitCapacityCommitmentResponse.pb(resp) + resp = iam_policy_pb2.TestIamPermissionsResponse() + pb_resp = resp json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_split_capacity_commitment(resp) + resp = self._interceptor.post_test_iam_permissions(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_split_capacity_commitment_with_metadata( + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = ( - reservation.SplitCapacityCommitmentResponse.to_json(response) - ) + response_payload = json_format.MessageToJson(resp) except: response_payload = None http_response = { @@ -4014,10 +5553,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.split_capacity_commitment", + "Received response for google.cloud.bigquery.reservation_v1.ReservationServiceClient.test_iam_permissions", extra={ "serviceName": "google.cloud.bigquery.reservation.v1.ReservationService", - "rpcName": "SplitCapacityCommitment", + "rpcName": "TestIamPermissions", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -4691,6 +6230,16 @@ def create_reservation( # In C++ this would require a dynamic_cast return self._CreateReservation(self._session, self._host, self._interceptor) # type: ignore + @property + def create_reservation_group( + self, + ) -> Callable[ + [reservation.CreateReservationGroupRequest], 
reservation.ReservationGroup + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateReservationGroup(self._session, self._host, self._interceptor) # type: ignore + @property def delete_assignment( self, @@ -4715,6 +6264,14 @@ def delete_reservation( # In C++ this would require a dynamic_cast return self._DeleteReservation(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_reservation_group( + self, + ) -> Callable[[reservation.DeleteReservationGroupRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteReservationGroup(self._session, self._host, self._interceptor) # type: ignore + @property def failover_reservation( self, @@ -4741,6 +6298,14 @@ def get_capacity_commitment( # In C++ this would require a dynamic_cast return self._GetCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def get_reservation( self, @@ -4749,6 +6314,16 @@ def get_reservation( # In C++ this would require a dynamic_cast return self._GetReservation(self._session, self._host, self._interceptor) # type: ignore + @property + def get_reservation_group( + self, + ) -> Callable[ + [reservation.GetReservationGroupRequest], reservation.ReservationGroup + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetReservationGroup(self._session, self._host, self._interceptor) # type: ignore + @property def list_assignments( self, @@ -4770,6 +6345,17 @@ def list_capacity_commitments( # In C++ this would require a dynamic_cast return self._ListCapacityCommitments(self._session, self._host, self._interceptor) # type: ignore + @property + def list_reservation_groups( + self, + ) -> Callable[ + [reservation.ListReservationGroupsRequest], + reservation.ListReservationGroupsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListReservationGroups(self._session, self._host, self._interceptor) # type: ignore + @property def list_reservations( self, @@ -4819,6 +6405,14 @@ def search_assignments( # In C++ this would require a dynamic_cast return self._SearchAssignments(self._session, self._host, self._interceptor) # type: ignore + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + @property def split_capacity_commitment( self, @@ -4830,6 +6424,17 @@ def split_capacity_commitment( # In C++ this would require a dynamic_cast return self._SplitCapacityCommitment(self._session, self._host, self._interceptor) # type: ignore + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property def update_assignment( self, diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest_base.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest_base.py index 2975a9f9f769..05dc1f45f992 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest_base.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/services/reservation_service/transports/rest_base.py @@ -18,6 +18,8 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1, path_template +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import json_format @@ -260,6 +262,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateReservationGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "reservationGroupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/reservationGroups", + "body": "reservation_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request 
= reservation.CreateReservationGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReservationServiceRestTransport._BaseCreateReservationGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteAssignment: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -401,6 +462,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteReservationGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/reservationGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = reservation.DeleteReservationGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReservationServiceRestTransport._BaseDeleteReservationGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseFailoverReservation: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -552,6 +660,57 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/reservations/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/locations/*/reservations/*/assignments/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReservationServiceRestTransport._BaseGetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetReservation: def __hash__(self): # pragma: NO COVER return 
NotImplementedError("__hash__ must be implemented.") @@ -599,6 +758,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetReservationGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/reservationGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = reservation.GetReservationGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReservationServiceRestTransport._BaseGetReservationGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListAssignments: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -693,6 +899,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListReservationGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/reservationGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = reservation.ListReservationGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReservationServiceRestTransport._BaseListReservationGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListReservations: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -933,6 +1186,68 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/reservations/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/reservations/*/assignments/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReservationServiceRestTransport._BaseSetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseSplitCapacityCommitment: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -990,6 +1305,68 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/reservations/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/locations/*/reservations/*/assignments/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = request + transcoded_request = path_template.transcode(http_options, pb_request) + return 
transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReservationServiceRestTransport._BaseTestIamPermissions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUpdateAssignment: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/__init__.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/__init__.py index 74bde8d86142..5b351a4b3896 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/__init__.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/__init__.py @@ -19,25 +19,32 @@ CapacityCommitment, CreateAssignmentRequest, CreateCapacityCommitmentRequest, + CreateReservationGroupRequest, CreateReservationRequest, DeleteAssignmentRequest, DeleteCapacityCommitmentRequest, + DeleteReservationGroupRequest, DeleteReservationRequest, Edition, FailoverMode, FailoverReservationRequest, GetBiReservationRequest, GetCapacityCommitmentRequest, + GetReservationGroupRequest, GetReservationRequest, ListAssignmentsRequest, ListAssignmentsResponse, ListCapacityCommitmentsRequest, ListCapacityCommitmentsResponse, + ListReservationGroupsRequest, + ListReservationGroupsResponse, ListReservationsRequest, ListReservationsResponse, MergeCapacityCommitmentsRequest, MoveAssignmentRequest, Reservation, + ReservationGroup, + 
SchedulingPolicy, SearchAllAssignmentsRequest, SearchAllAssignmentsResponse, SearchAssignmentsRequest, @@ -57,23 +64,30 @@ "CapacityCommitment", "CreateAssignmentRequest", "CreateCapacityCommitmentRequest", + "CreateReservationGroupRequest", "CreateReservationRequest", "DeleteAssignmentRequest", "DeleteCapacityCommitmentRequest", + "DeleteReservationGroupRequest", "DeleteReservationRequest", "FailoverReservationRequest", "GetBiReservationRequest", "GetCapacityCommitmentRequest", + "GetReservationGroupRequest", "GetReservationRequest", "ListAssignmentsRequest", "ListAssignmentsResponse", "ListCapacityCommitmentsRequest", "ListCapacityCommitmentsResponse", + "ListReservationGroupsRequest", + "ListReservationGroupsResponse", "ListReservationsRequest", "ListReservationsResponse", "MergeCapacityCommitmentsRequest", "MoveAssignmentRequest", "Reservation", + "ReservationGroup", + "SchedulingPolicy", "SearchAllAssignmentsRequest", "SearchAllAssignmentsResponse", "SearchAssignmentsRequest", diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py index ffe1ad24e092..96d5cd21d988 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py @@ -28,6 +28,8 @@ "Edition", "FailoverMode", "Reservation", + "SchedulingPolicy", + "ReservationGroup", "CapacityCommitment", "CreateReservationRequest", "ListReservationsRequest", @@ -36,6 +38,11 @@ "DeleteReservationRequest", "UpdateReservationRequest", "FailoverReservationRequest", + "CreateReservationGroupRequest", + "GetReservationGroupRequest", + "ListReservationGroupsRequest", + "ListReservationGroupsResponse", + "DeleteReservationGroupRequest", "CreateCapacityCommitmentRequest", "ListCapacityCommitmentsRequest", 
"ListCapacityCommitmentsResponse", @@ -89,7 +96,7 @@ class Edition(proto.Enum): class FailoverMode(proto.Enum): r"""The failover mode when a user initiates a failover on a - reservation determines how writes that arepending replication + reservation determines how writes that are pending replication are handled after the failover is initiated. Values: @@ -121,15 +128,15 @@ class Reservation(proto.Message): Attributes: name (str): - The resource name of the reservation, e.g., + Identifier. The resource name of the reservation, e.g., ``projects/*/locations/*/reservations/team1-prod``. The reservation_id must only contain lower case alphanumeric characters or dashes. It must start with a letter and must not end with a dash. Its maximum length is 64 characters. slot_capacity (int): - Baseline slots available to this reservation. A slot is a - unit of computational power in BigQuery, and serves as the - unit of parallelism. + Optional. Baseline slots available to this reservation. A + slot is a unit of computational power in BigQuery, and + serves as the unit of parallelism. Queries using this reservation might use more slots during runtime if ignore_idle_slots is set to false, or autoscaling @@ -145,26 +152,25 @@ class Reservation(proto.Message): baseline slots exceed your committed slots. Otherwise, you can decrease your baseline slots every few minutes. ignore_idle_slots (bool): - If false, any query or pipeline job using this reservation - will use idle slots from other reservations within the same - admin project. If true, a query or pipeline job using this - reservation will execute with the slot capacity specified in - the slot_capacity field at most. + Optional. If false, any query or pipeline job using this + reservation will use idle slots from other reservations + within the same admin project. If true, a query or pipeline + job using this reservation will execute with the slot + capacity specified in the slot_capacity field at most. 
autoscale (google.cloud.bigquery_reservation_v1.types.Reservation.Autoscale): - The configuration parameters for the auto - scaling feature. + Optional. The configuration parameters for + the auto scaling feature. concurrency (int): - Job concurrency target which sets a soft - upper bound on the number of jobs that can run - concurrently in this reservation. This is a soft - target due to asynchronous nature of the system - and various optimizations for small queries. - Default value is 0 which means that concurrency - target will be automatically computed by the - system. - NOTE: this field is exposed as target job - concurrency in the Information Schema, DDL and - BigQuery CLI. + Optional. Job concurrency target which sets a + soft upper bound on the number of jobs that can + run concurrently in this reservation. This is a + soft target due to asynchronous nature of the + system and various optimizations for small + queries. Default value is 0 which means that + concurrency target will be automatically + computed by the system. NOTE: this field is + exposed as target job concurrency in the + Information Schema, DDL and BigQuery CLI. creation_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Creation time of the reservation. @@ -185,7 +191,7 @@ class Reservation(proto.Message): NOTE: this is a preview feature. Project must be allow-listed in order to set this field. edition (google.cloud.bigquery_reservation_v1.types.Edition): - Edition of the reservation. + Optional. Edition of the reservation. primary_location (str): Output only. The current location of the reservation's primary replica. This field is @@ -219,16 +225,19 @@ class Reservation(proto.Message): - baseline. This field must be set together with the scaling_mode enum - value. + value, otherwise the request will be rejected with error + code ``google.rpc.Code.INVALID_ARGUMENT``. If the max_slots and scaling_mode are set, the autoscale or - autoscale.max_slots field must be unset. 
However, the - autoscale field may still be in the output. The - autopscale.max_slots will always show as 0 and the - autoscaler.current_slots will represent the current slots - from autoscaler excluding idle slots. For example, if the - max_slots is 1000 and scaling_mode is AUTOSCALE_ONLY, then - in the output, the autoscaler.max_slots will be 0 and the + autoscale.max_slots field must be unset. Otherwise the + request will be rejected with error code + ``google.rpc.Code.INVALID_ARGUMENT``. However, the autoscale + field may still be in the output. The autopscale.max_slots + will always show as 0 and the autoscaler.current_slots will + represent the current slots from autoscaler excluding idle + slots. For example, if the max_slots is 1000 and + scaling_mode is AUTOSCALE_ONLY, then in the output, the + autoscaler.max_slots will be 0 and the autoscaler.current_slots may be any value between 0 and 1000. @@ -243,13 +252,15 @@ class Reservation(proto.Message): If the max_slots and scaling_mode are set, then the ignore_idle_slots field must be aligned with the scaling_mode enum value.(See details in ScalingMode - comments). + comments). Otherwise the request will be rejected with error + code ``google.rpc.Code.INVALID_ARGUMENT``. Please note, the max_slots is for user to manage the part of slots greater than the baseline. Therefore, we don't allow users to set max_slots smaller or equal to the baseline as it will not be meaningful. If the field is present and - slot_capacity>=max_slots. + slot_capacity>=max_slots, requests will be rejected with + error code ``google.rpc.Code.INVALID_ARGUMENT``. Please note that if max_slots is set to 0, we will treat it as unset. Customers can set max_slots to 0 and set @@ -259,7 +270,22 @@ class Reservation(proto.Message): This field is a member of `oneof`_ ``_max_slots``. scaling_mode (google.cloud.bigquery_reservation_v1.types.Reservation.ScalingMode): Optional. The scaling mode for the reservation. 
If the field - is present but max_slots is not present. + is present but max_slots is not present, requests will be + rejected with error code + ``google.rpc.Code.INVALID_ARGUMENT``. + labels (MutableMapping[str, str]): + Optional. The labels associated with this + reservation. You can use these to organize and + group your reservations. You can set this + property when you create or update a + reservation. + reservation_group (str): + Optional. The reservation group that this reservation + belongs to. You can set this property when you create or + update a reservation. Reservations do not need to belong to + a reservation group. Format: + projects/{project}/locations/{location}/reservationGroups/{reservation_group} + or just {reservation_group} replication_status (google.cloud.bigquery_reservation_v1.types.Reservation.ReplicationStatus): Output only. The Disaster Recovery(DR) replication status of the reservation. This is @@ -273,6 +299,13 @@ class Reservation(proto.Message): reservation or the reservation is a DR secondary or that any replication operations on the reservation have succeeded. + scheduling_policy (google.cloud.bigquery_reservation_v1.types.SchedulingPolicy): + Optional. The scheduling policy to use for + jobs and queries running under this reservation. + The scheduling policy controls how the + reservation's resources are distributed. + + This feature is not yet generally available. """ class ScalingMode(proto.Enum): @@ -292,7 +325,8 @@ class ScalingMode(proto.Enum): up to 800 slots and no idle slots will be used. Please note, in this mode, the ignore_idle_slots field must - be set to true. + be set to true. Otherwise the request will be rejected with + error code ``google.rpc.Code.INVALID_ARGUMENT``. IDLE_SLOTS_ONLY (2): The reservation will scale up using only idle slots contributed by other reservations or from unassigned @@ -314,7 +348,8 @@ class ScalingMode(proto.Enum): to max_slots. 
Please note, in this mode, the ignore_idle_slots field must - be set to false. + be set to false. Otherwise the request will be rejected with + error code ``google.rpc.Code.INVALID_ARGUMENT``. ALL_SLOTS (3): The reservation will scale up using all slots available to it. It will use idle slots contributed by other reservations @@ -335,7 +370,8 @@ class ScalingMode(proto.Enum): baseline and 800 autoscaling slots. Please note, in this mode, the ignore_idle_slots field must - be set to false. + be set to false. Otherwise the request will be rejected with + error code ``google.rpc.Code.INVALID_ARGUMENT``. """ SCALING_MODE_UNSPECIFIED = 0 AUTOSCALE_ONLY = 1 @@ -354,7 +390,8 @@ class Autoscale(proto.Message): the original value and could be larger than max_slots for that brief period (less than one minute) max_slots (int): - Number of slots to be scaled when needed. + Optional. Number of slots to be scaled when + needed. """ current_slots: int = proto.Field( @@ -478,11 +515,85 @@ class ReplicationStatus(proto.Message): number=22, enum=ScalingMode, ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=23, + ) + reservation_group: str = proto.Field( + proto.STRING, + number=25, + ) replication_status: ReplicationStatus = proto.Field( proto.MESSAGE, number=24, message=ReplicationStatus, ) + scheduling_policy: "SchedulingPolicy" = proto.Field( + proto.MESSAGE, + number=27, + message="SchedulingPolicy", + ) + + +class SchedulingPolicy(proto.Message): + r"""The scheduling policy controls how a reservation's resources + are distributed. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + concurrency (int): + Optional. If present and > 0, the reservation + will attempt to limit the concurrency of jobs + running for any particular project within it to + the given value. + + This feature is not yet generally available. 
+ + This field is a member of `oneof`_ ``_concurrency``. + max_slots (int): + Optional. If present and > 0, the reservation + will attempt to limit the slot consumption of + queries running for any particular project + within it to the given value. + + This feature is not yet generally available. + + This field is a member of `oneof`_ ``_max_slots``. + """ + + concurrency: int = proto.Field( + proto.INT64, + number=1, + optional=True, + ) + max_slots: int = proto.Field( + proto.INT64, + number=2, + optional=True, + ) + + +class ReservationGroup(proto.Message): + r"""A reservation group is a container for reservations. + + Attributes: + name (str): + Identifier. The resource name of the reservation group, + e.g., + ``projects/*/locations/*/reservationGroups/team1-prod``. The + reservation_group_id must only contain lower case + alphanumeric characters or dashes. It must start with a + letter and must not end with a dash. Its maximum length is + 64 characters. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) class CapacityCommitment(proto.Message): @@ -506,9 +617,10 @@ class CapacityCommitment(proto.Message): characters or dashes. It must start with a letter and must not end with a dash. Its maximum length is 64 characters. slot_count (int): - Number of slots in this commitment. + Optional. Number of slots in this commitment. plan (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.CommitmentPlan): - Capacity commitment commitment plan. + Optional. Capacity commitment commitment + plan. state (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.State): Output only. State of the commitment. commitment_start_time (google.protobuf.timestamp_pb2.Timestamp): @@ -521,15 +633,15 @@ class CapacityCommitment(proto.Message): Output only. The end of the current commitment period. It is applicable only for ACTIVE capacity commitments. Note after renewal, commitment_end_time is the time the renewed - commitment expires. 
So it would be at a time after + commitment expires. So itwould be at a time after commitment_start_time + committed period, because we don't change commitment_start_time , failure_status (google.rpc.status_pb2.Status): Output only. For FAILED commitment plan, provides the reason of failure. renewal_plan (google.cloud.bigquery_reservation_v1.types.CapacityCommitment.CommitmentPlan): - The plan this capacity commitment is converted to after - commitment_end_time passes. Once the plan is changed, + Optional. The plan this capacity commitment is converted to + after commitment_end_time passes. Once the plan is changed, committed period is extended according to commitment plan. Only applicable for ANNUAL and TRIAL commitments. multi_region_auxiliary (bool): @@ -546,7 +658,7 @@ class CapacityCommitment(proto.Message): NOTE: this is a preview feature. Project must be allow-listed in order to set this field. edition (google.cloud.bigquery_reservation_v1.types.Edition): - Edition of the capacity commitment. + Optional. Edition of the capacity commitment. is_flat_rate (bool): Output only. If true, the commitment is a flat-rate commitment, otherwise, it's an edition @@ -867,6 +979,131 @@ class FailoverReservationRequest(proto.Message): ) +class CreateReservationGroupRequest(proto.Message): + r"""The request for + [ReservationService.CreateReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.CreateReservationGroup]. + + Attributes: + parent (str): + Required. Project, location. E.g., + ``projects/myproject/locations/US`` + reservation_group_id (str): + Required. The reservation group ID. It must + only contain lower case alphanumeric characters + or dashes. It must start with a letter and must + not end with a dash. Its maximum length is 64 + characters. + reservation_group (google.cloud.bigquery_reservation_v1.types.ReservationGroup): + Required. New Reservation Group to create. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + reservation_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + reservation_group: "ReservationGroup" = proto.Field( + proto.MESSAGE, + number=3, + message="ReservationGroup", + ) + + +class GetReservationGroupRequest(proto.Message): + r"""The request for + [ReservationService.GetReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.GetReservationGroup]. + + Attributes: + name (str): + Required. Resource name of the reservation group to + retrieve. E.g., + ``projects/myproject/locations/US/reservationGroups/team1-prod`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListReservationGroupsRequest(proto.Message): + r"""The request for + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. + + Attributes: + parent (str): + Required. The parent resource name containing project and + location, e.g.: ``projects/myproject/locations/US`` + page_size (int): + The maximum number of items to return per + page. + page_token (str): + The next_page_token value returned from a previous List + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListReservationGroupsResponse(proto.Message): + r"""The response for + [ReservationService.ListReservationGroups][google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups]. + + Attributes: + reservation_groups (MutableSequence[google.cloud.bigquery_reservation_v1.types.ReservationGroup]): + List of reservations visible to the user. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
+ """ + + @property + def raw_page(self): + return self + + reservation_groups: MutableSequence["ReservationGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ReservationGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteReservationGroupRequest(proto.Message): + r"""The request for + [ReservationService.DeleteReservationGroup][google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservationGroup]. + + Attributes: + name (str): + Required. Resource name of the reservation group to + retrieve. E.g., + ``projects/myproject/locations/US/reservationGroups/team1-prod`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class CreateCapacityCommitmentRequest(proto.Message): r"""The request for [ReservationService.CreateCapacityCommitment][google.cloud.bigquery.reservation.v1.ReservationService.CreateCapacityCommitment]. @@ -1098,6 +1335,14 @@ class MergeCapacityCommitmentsRequest(proto.Message): ID is the last portion of capacity commitment name e.g., 'abc' for projects/myproject/locations/US/capacityCommitments/abc + capacity_commitment_id (str): + Optional. The optional resulting capacity + commitment ID. Capacity commitment name will be + generated automatically if this field is empty. + This field must only contain lower case + alphanumeric characters or dashes. The first and + last character cannot be a dash. Max length is + 64 characters. """ parent: str = proto.Field( @@ -1108,6 +1353,10 @@ class MergeCapacityCommitmentsRequest(proto.Message): proto.STRING, number=2, ) + capacity_commitment_id: str = proto.Field( + proto.STRING, + number=3, + ) class Assignment(proto.Message): @@ -1121,11 +1370,12 @@ class Assignment(proto.Message): The assignment_id must only contain lower case alphanumeric characters or dashes and the max length is 64 characters. assignee (str): - The resource which will use the reservation. E.g. + Optional. The resource which will use the reservation. 
E.g. ``projects/myproject``, ``folders/123``, or ``organizations/456``. job_type (google.cloud.bigquery_reservation_v1.types.Assignment.JobType): - Which type of jobs will use the reservation. + Optional. Which type of jobs will use the + reservation. state (google.cloud.bigquery_reservation_v1.types.Assignment.State): Output only. State of the assignment. enable_gemini_in_bigquery (bool): @@ -1138,6 +1388,15 @@ class Assignment(proto.Message): parent reservation edition is ENTERPRISE_PLUS, then the assignment will give the grantee project/organization access to "Gemini in BigQuery" features. + scheduling_policy (google.cloud.bigquery_reservation_v1.types.SchedulingPolicy): + Optional. The scheduling policy to use for + jobs and queries of this assignee when running + under the associated reservation. The scheduling + policy controls how the reservation's resources + are distributed. This overrides the default + scheduling policy specified on the reservation. + + This feature is not yet generally available. """ class JobType(proto.Enum): @@ -1166,6 +1425,25 @@ class JobType(proto.Enum): reservation. Reservations with continuous assignments cannot be mixed with non-continuous assignments. + BACKGROUND_CHANGE_DATA_CAPTURE (7): + Finer granularity background jobs for + capturing changes in a source database and + streaming them into BigQuery. Reservations with + this job type take priority over a default + BACKGROUND reservation assignment (if it + exists). + BACKGROUND_COLUMN_METADATA_INDEX (8): + Finer granularity background jobs for + refreshing cached metadata for BigQuery tables. + Reservations with this job type take priority + over a default BACKGROUND reservation assignment + (if it exists). + BACKGROUND_SEARCH_INDEX_REFRESH (9): + Finer granularity background jobs for + refreshing search indexes upon BigQuery table + columns. Reservations with this job type take + priority over a default BACKGROUND reservation + assignment (if it exists). 
""" JOB_TYPE_UNSPECIFIED = 0 PIPELINE = 1 @@ -1173,6 +1451,9 @@ class JobType(proto.Enum): ML_EXTERNAL = 3 BACKGROUND = 4 CONTINUOUS = 6 + BACKGROUND_CHANGE_DATA_CAPTURE = 7 + BACKGROUND_COLUMN_METADATA_INDEX = 8 + BACKGROUND_SEARCH_INDEX_REFRESH = 9 class State(proto.Enum): r"""Assignment will remain in PENDING state if no active capacity @@ -1214,6 +1495,11 @@ class State(proto.Enum): proto.BOOL, number=10, ) + scheduling_policy: "SchedulingPolicy" = proto.Field( + proto.MESSAGE, + number=11, + message="SchedulingPolicy", + ) class CreateAssignmentRequest(proto.Message): @@ -1549,11 +1835,14 @@ class TableReference(proto.Message): Attributes: project_id (str): - The assigned project ID of the project. + Optional. The assigned project ID of the + project. dataset_id (str): - The ID of the dataset in the above project. + Optional. The ID of the dataset in the above + project. table_id (str): - The ID of the table in the above dataset. + Optional. The ID of the table in the above + dataset. """ project_id: str = proto.Field( @@ -1575,16 +1864,17 @@ class BiReservation(proto.Message): Attributes: name (str): - The resource name of the singleton BI reservation. - Reservation names have the form + Identifier. The resource name of the singleton BI + reservation. Reservation names have the form ``projects/{project_id}/locations/{location_id}/biReservation``. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last update timestamp of a reservation. size (int): - Size of a reservation, in bytes. + Optional. Size of a reservation, in bytes. preferred_tables (MutableSequence[google.cloud.bigquery_reservation_v1.types.TableReference]): - Preferred tables to use BI capacity for. + Optional. Preferred tables to use BI capacity + for. 
""" name: str = proto.Field( diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_group_async.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_group_async.py new file mode 100644 index 000000000000..67f90605b006 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateReservationGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_CreateReservationGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +async def sample_create_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateReservationGroupRequest( + parent="parent_value", + reservation_group_id="reservation_group_id_value", + ) + + # Make the request + response = await client.create_reservation_group(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_CreateReservationGroup_async] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_group_sync.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_group_sync.py new file mode 100644 index 000000000000..07280cec9a5d --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_create_reservation_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateReservationGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_CreateReservationGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +def sample_create_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.CreateReservationGroupRequest( + parent="parent_value", + reservation_group_id="reservation_group_id_value", + ) + + # Make the request + response = client.create_reservation_group(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_CreateReservationGroup_sync] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_async.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_async.py new file mode 100644 index 000000000000..abe35d53fc7d --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteReservationGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_DeleteReservationGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +async def sample_delete_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.DeleteReservationGroupRequest( + name="name_value", + ) + + # Make the request + await client.delete_reservation_group(request=request) + + +# [END bigqueryreservation_v1_generated_ReservationService_DeleteReservationGroup_async] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_sync.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_sync.py new file mode 100644 index 000000000000..949130fd02eb --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteReservationGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_DeleteReservationGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +def sample_delete_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.DeleteReservationGroupRequest( + name="name_value", + ) + + # Make the request + client.delete_reservation_group(request=request) + + +# [END bigqueryreservation_v1_generated_ReservationService_DeleteReservationGroup_sync] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_iam_policy_async.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_iam_policy_async.py new file mode 100644 index 000000000000..d1551d5fbf8f --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_GetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_get_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_GetIamPolicy_async] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_iam_policy_sync.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_iam_policy_sync.py new file mode 100644 index 000000000000..d03d29ba4235 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_get_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_GetIamPolicy_sync] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_group_async.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_group_async.py new file mode 100644 index 000000000000..b224b51643c2 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_group_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetReservationGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_GetReservationGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +async def sample_get_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetReservationGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_reservation_group(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_GetReservationGroup_async] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_group_sync.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_group_sync.py new file mode 100644 index 000000000000..e812a8c8b314 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_get_reservation_group_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetReservationGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_GetReservationGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +def sample_get_reservation_group(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.GetReservationGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_reservation_group(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_GetReservationGroup_sync] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_async.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_async.py new file mode 100644 index 000000000000..afe0abe68b85 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListReservationGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_ListReservationGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +async def sample_list_reservation_groups(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListReservationGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reservation_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_ListReservationGroups_async] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_sync.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_sync.py new file mode 100644 index 000000000000..e7b240a615d7 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListReservationGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_ListReservationGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 + + +def sample_list_reservation_groups(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = bigquery_reservation_v1.ListReservationGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_reservation_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_ListReservationGroups_sync] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_set_iam_policy_async.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_set_iam_policy_async.py new file mode 100644 index 000000000000..2a6c174412c3 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_set_iam_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_SetIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_set_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = await client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_SetIamPolicy_async] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_set_iam_policy_sync.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_set_iam_policy_sync.py new file mode 100644 index 000000000000..2fd8cb712160 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_set_iam_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_set_iam_policy(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_SetIamPolicy_sync] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_async.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_async.py new file mode 100644 index 000000000000..5e0f734f1f53 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_TestIamPermissions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +async def sample_test_iam_permissions(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceAsyncClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = await client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_TestIamPermissions_async] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_sync.py b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_sync.py new file mode 100644 index 000000000000..803f313da341 --- /dev/null +++ b/packages/google-cloud-bigquery-reservation/samples/generated_samples/bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-reservation + + +# [START bigqueryreservation_v1_generated_ReservationService_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import bigquery_reservation_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore + + +def sample_test_iam_permissions(): + # Create a client + client = bigquery_reservation_v1.ReservationServiceClient() + + # Initialize request argument(s) + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + permissions=['permissions_value1', 'permissions_value2'], + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + +# [END bigqueryreservation_v1_generated_ReservationService_TestIamPermissions_sync] diff --git a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json index a2bccf0fed7b..92ed8263ff68 100644 --- a/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json +++ 
b/packages/google-cloud-bigquery-reservation/samples/generated_samples/snippet_metadata_google.cloud.bigquery.reservation.v1.json @@ -349,6 +349,159 @@ ], "title": "bigqueryreservation_v1_generated_reservation_service_create_capacity_commitment_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", + "shortName": "ReservationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.create_reservation_group", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateReservationGroup", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "CreateReservationGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.CreateReservationGroupRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_reservation_v1.types.ReservationGroup", + "shortName": "create_reservation_group" + }, + "description": "Sample for CreateReservationGroup", + "file": "bigqueryreservation_v1_generated_reservation_service_create_reservation_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateReservationGroup_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_create_reservation_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", + "shortName": "ReservationServiceClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.create_reservation_group", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.CreateReservationGroup", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "CreateReservationGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.CreateReservationGroupRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_reservation_v1.types.ReservationGroup", + "shortName": "create_reservation_group" + }, + "description": "Sample for CreateReservationGroup", + "file": "bigqueryreservation_v1_generated_reservation_service_create_reservation_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_CreateReservationGroup_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_create_reservation_group_sync.py" + }, { 
"canonical": true, "clientMethod": { @@ -844,19 +997,19 @@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", "shortName": "ReservationServiceAsyncClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.delete_reservation", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.delete_reservation_group", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservationGroup", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "DeleteReservation" + "shortName": "DeleteReservationGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest" + "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationGroupRequest" }, { "name": "name", @@ -875,13 +1028,13 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_reservation" + "shortName": "delete_reservation_group" }, - "description": "Sample for DeleteReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py", + "description": "Sample for DeleteReservationGroup", + "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteReservation_async", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteReservationGroup_async", "segments": [ { "end": 49, @@ -912,7 +1065,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py" + "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_async.py" }, 
{ "canonical": true, @@ -921,19 +1074,19 @@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", "shortName": "ReservationServiceClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.delete_reservation", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.delete_reservation_group", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservationGroup", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "DeleteReservation" + "shortName": "DeleteReservationGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest" + "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationGroupRequest" }, { "name": "name", @@ -952,13 +1105,13 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_reservation" + "shortName": "delete_reservation_group" }, - "description": "Sample for DeleteReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py", + "description": "Sample for DeleteReservationGroup", + "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteReservation_sync", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteReservationGroup_sync", "segments": [ { "end": 49, @@ -989,7 +1142,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py" + "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_group_sync.py" }, { "canonical": true, @@ -999,19 +1152,23 
@@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", "shortName": "ReservationServiceAsyncClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.failover_reservation", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.delete_reservation", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.FailoverReservation", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "FailoverReservation" + "shortName": "DeleteReservation" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest" + "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1026,22 +1183,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "failover_reservation" + "shortName": "delete_reservation" }, - "description": "Sample for FailoverReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py", + "description": "Sample for DeleteReservation", + "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_FailoverReservation_async", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_DeleteReservation_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1056,17 +1212,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": 
"REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py" + "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_async.py" }, { "canonical": true, @@ -1075,19 +1229,23 @@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", "shortName": "ReservationServiceClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.failover_reservation", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.delete_reservation", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.FailoverReservation", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.DeleteReservation", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "FailoverReservation" + "shortName": "DeleteReservation" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest" + "type": "google.cloud.bigquery_reservation_v1.types.DeleteReservationRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1102,22 +1260,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", - "shortName": "failover_reservation" + "shortName": "delete_reservation" }, - "description": "Sample for FailoverReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py", + "description": "Sample for DeleteReservation", + "file": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_FailoverReservation_sync", + "regionTag": 
"bigqueryreservation_v1_generated_ReservationService_DeleteReservation_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1132,17 +1289,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py" + "title": "bigqueryreservation_v1_generated_reservation_service_delete_reservation_sync.py" }, { "canonical": true, @@ -1152,23 +1307,19 @@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", "shortName": "ReservationServiceAsyncClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_bi_reservation", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.failover_reservation", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetBiReservation", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.FailoverReservation", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "GetBiReservation" + "shortName": "FailoverReservation" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest" }, { "name": "retry", @@ -1183,14 +1334,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.bigquery_reservation_v1.types.BiReservation", - "shortName": "get_bi_reservation" + "resultType": "google.cloud.bigquery_reservation_v1.types.Reservation", + "shortName": "failover_reservation" }, - "description": "Sample for GetBiReservation", - "file": 
"bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py", + "description": "Sample for FailoverReservation", + "file": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetBiReservation_async", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_FailoverReservation_async", "segments": [ { "end": 51, @@ -1223,7 +1374,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py" + "title": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_async.py" }, { "canonical": true, @@ -1232,23 +1383,19 @@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", "shortName": "ReservationServiceClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_bi_reservation", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.failover_reservation", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetBiReservation", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.FailoverReservation", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "GetBiReservation" + "shortName": "FailoverReservation" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.bigquery_reservation_v1.types.FailoverReservationRequest" }, { "name": "retry", @@ -1263,14 +1410,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.bigquery_reservation_v1.types.BiReservation", - "shortName": "get_bi_reservation" + "resultType": 
"google.cloud.bigquery_reservation_v1.types.Reservation", + "shortName": "failover_reservation" }, - "description": "Sample for GetBiReservation", - "file": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py", + "description": "Sample for FailoverReservation", + "file": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetBiReservation_sync", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_FailoverReservation_sync", "segments": [ { "end": 51, @@ -1303,7 +1450,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py" + "title": "bigqueryreservation_v1_generated_reservation_service_failover_reservation_sync.py" }, { "canonical": true, @@ -1313,19 +1460,502 @@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", "shortName": "ReservationServiceAsyncClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_capacity_commitment", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_bi_reservation", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetBiReservation", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "GetBiReservation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.bigquery_reservation_v1.types.BiReservation", + "shortName": "get_bi_reservation" + }, + "description": "Sample for GetBiReservation", + "file": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetBiReservation_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", + "shortName": "ReservationServiceClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_bi_reservation", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetBiReservation", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "GetBiReservation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.GetBiReservationRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_reservation_v1.types.BiReservation", + "shortName": "get_bi_reservation" + }, + "description": "Sample for GetBiReservation", + "file": 
"bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetBiReservation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_get_bi_reservation_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", + "shortName": "ReservationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_capacity_commitment", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "GetCapacityCommitment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", + "shortName": "get_capacity_commitment" + }, + "description": "Sample for GetCapacityCommitment", + "file": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", + "shortName": "ReservationServiceClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_capacity_commitment", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "GetCapacityCommitment" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", + "shortName": "get_capacity_commitment" + }, + "description": "Sample for GetCapacityCommitment", + "file": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", + "shortName": "ReservationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_iam_policy", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetIamPolicy", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "bigqueryreservation_v1_generated_reservation_service_get_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_get_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", + "shortName": "ReservationServiceClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_iam_policy", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetIamPolicy", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "GetCapacityCommitment" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest" + "type": "google.iam.v1.iam_policy_pb2.GetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "bigqueryreservation_v1_generated_reservation_service_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 
41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", + "shortName": "ReservationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.get_reservation_group", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetReservationGroup", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "GetReservationGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.GetReservationGroupRequest" }, { "name": "name", @@ -1344,14 +1974,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "get_capacity_commitment" + "resultType": "google.cloud.bigquery_reservation_v1.types.ReservationGroup", + "shortName": "get_reservation_group" }, - "description": "Sample for GetCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py", + "description": "Sample for GetReservationGroup", + "file": "bigqueryreservation_v1_generated_reservation_service_get_reservation_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_async", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetReservationGroup_async", "segments": [ { "end": 51, @@ 
-1384,7 +2014,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_async.py" + "title": "bigqueryreservation_v1_generated_reservation_service_get_reservation_group_async.py" }, { "canonical": true, @@ -1393,19 +2023,19 @@ "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", "shortName": "ReservationServiceClient" }, - "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_capacity_commitment", + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.get_reservation_group", "method": { - "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetCapacityCommitment", + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.GetReservationGroup", "service": { "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", "shortName": "ReservationService" }, - "shortName": "GetCapacityCommitment" + "shortName": "GetReservationGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.bigquery_reservation_v1.types.GetCapacityCommitmentRequest" + "type": "google.cloud.bigquery_reservation_v1.types.GetReservationGroupRequest" }, { "name": "name", @@ -1424,14 +2054,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.bigquery_reservation_v1.types.CapacityCommitment", - "shortName": "get_capacity_commitment" + "resultType": "google.cloud.bigquery_reservation_v1.types.ReservationGroup", + "shortName": "get_reservation_group" }, - "description": "Sample for GetCapacityCommitment", - "file": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py", + "description": "Sample for GetReservationGroup", + "file": "bigqueryreservation_v1_generated_reservation_service_get_reservation_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"bigqueryreservation_v1_generated_ReservationService_GetCapacityCommitment_sync", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_GetReservationGroup_sync", "segments": [ { "end": 51, @@ -1464,7 +2094,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_get_capacity_commitment_sync.py" + "title": "bigqueryreservation_v1_generated_reservation_service_get_reservation_group_sync.py" }, { "canonical": true, @@ -1907,14 +2537,175 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsPager", - "shortName": "list_capacity_commitments" + "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListCapacityCommitmentsPager", + "shortName": "list_capacity_commitments" + }, + "description": "Sample for ListCapacityCommitments", + "file": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", + "shortName": "ReservationServiceAsyncClient" + }, + "fullName": 
"google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.list_reservation_groups", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "ListReservationGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.ListReservationGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationGroupsAsyncPager", + "shortName": "list_reservation_groups" + }, + "description": "Sample for ListReservationGroups", + "file": "bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListReservationGroups_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", + "shortName": "ReservationServiceClient" + }, + "fullName": 
"google.cloud.bigquery_reservation_v1.ReservationServiceClient.list_reservation_groups", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.ListReservationGroups", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "ListReservationGroups" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.bigquery_reservation_v1.types.ListReservationGroupsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.bigquery_reservation_v1.services.reservation_service.pagers.ListReservationGroupsPager", + "shortName": "list_reservation_groups" }, - "description": "Sample for ListCapacityCommitments", - "file": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py", + "description": "Sample for ListReservationGroups", + "file": "bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListCapacityCommitments_sync", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_ListReservationGroups_sync", "segments": [ { "end": 52, @@ -1947,7 +2738,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "bigqueryreservation_v1_generated_reservation_service_list_capacity_commitments_sync.py" + "title": "bigqueryreservation_v1_generated_reservation_service_list_reservation_groups_sync.py" }, { "canonical": true, @@ -2786,6 +3577,167 @@ ], "title": "bigqueryreservation_v1_generated_reservation_service_search_assignments_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", + "shortName": "ReservationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.set_iam_policy", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SetIamPolicy", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "bigqueryreservation_v1_generated_reservation_service_set_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_SetIamPolicy_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_set_iam_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", + "shortName": "ReservationServiceClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.set_iam_policy", + 
"method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.SetIamPolicy", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.SetIamPolicyRequest" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.policy_pb2.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "bigqueryreservation_v1_generated_reservation_service_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_SetIamPolicy_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_set_iam_policy_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2955,6 +3907,159 @@ ], "title": "bigqueryreservation_v1_generated_reservation_service_split_capacity_commitment_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient", + "shortName": "ReservationServiceAsyncClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceAsyncClient.test_iam_permissions", + "method": { + "fullName": 
"google.cloud.bigquery.reservation.v1.ReservationService.TestIamPermissions", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": "ReservationService" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_TestIamPermissions_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient", + "shortName": "ReservationServiceClient" + }, + "fullName": "google.cloud.bigquery_reservation_v1.ReservationServiceClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService.TestIamPermissions", + "service": { + "fullName": "google.cloud.bigquery.reservation.v1.ReservationService", + "shortName": 
"ReservationService" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "bigqueryreservation_v1_generated_ReservationService_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 41, + "start": 39, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 42, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "bigqueryreservation_v1_generated_reservation_service_test_iam_permissions_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-bigquery-reservation/scripts/fixup_bigquery_reservation_v1_keywords.py b/packages/google-cloud-bigquery-reservation/scripts/fixup_bigquery_reservation_v1_keywords.py index c97f0fa331cf..1bcaf6d5ab34 100644 --- a/packages/google-cloud-bigquery-reservation/scripts/fixup_bigquery_reservation_v1_keywords.py +++ b/packages/google-cloud-bigquery-reservation/scripts/fixup_bigquery_reservation_v1_keywords.py @@ -42,21 +42,28 @@ class bigquery_reservationCallTransformer(cst.CSTTransformer): 'create_assignment': ('parent', 'assignment', 'assignment_id', ), 'create_capacity_commitment': ('parent', 'capacity_commitment', 
'enforce_single_admin_project_per_org', 'capacity_commitment_id', ), 'create_reservation': ('parent', 'reservation_id', 'reservation', ), + 'create_reservation_group': ('parent', 'reservation_group_id', 'reservation_group', ), 'delete_assignment': ('name', ), 'delete_capacity_commitment': ('name', 'force', ), 'delete_reservation': ('name', ), + 'delete_reservation_group': ('name', ), 'failover_reservation': ('name', 'failover_mode', ), 'get_bi_reservation': ('name', ), 'get_capacity_commitment': ('name', ), + 'get_iam_policy': ('resource', 'options', ), 'get_reservation': ('name', ), + 'get_reservation_group': ('name', ), 'list_assignments': ('parent', 'page_size', 'page_token', ), 'list_capacity_commitments': ('parent', 'page_size', 'page_token', ), + 'list_reservation_groups': ('parent', 'page_size', 'page_token', ), 'list_reservations': ('parent', 'page_size', 'page_token', ), - 'merge_capacity_commitments': ('parent', 'capacity_commitment_ids', ), + 'merge_capacity_commitments': ('parent', 'capacity_commitment_ids', 'capacity_commitment_id', ), 'move_assignment': ('name', 'destination_id', 'assignment_id', ), 'search_all_assignments': ('parent', 'query', 'page_size', 'page_token', ), 'search_assignments': ('parent', 'query', 'page_size', 'page_token', ), + 'set_iam_policy': ('resource', 'policy', 'update_mask', ), 'split_capacity_commitment': ('name', 'slot_count', ), + 'test_iam_permissions': ('resource', 'permissions', ), 'update_assignment': ('assignment', 'update_mask', ), 'update_bi_reservation': ('bi_reservation', 'update_mask', ), 'update_capacity_commitment': ('capacity_commitment', 'update_mask', ), diff --git a/packages/google-cloud-bigquery-reservation/setup.py b/packages/google-cloud-bigquery-reservation/setup.py index a245620cdae0..254bae4e9084 100644 --- a/packages/google-cloud-bigquery-reservation/setup.py +++ b/packages/google-cloud-bigquery-reservation/setup.py @@ -48,6 +48,7 @@ "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; 
python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0", ] extras = {} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-bigquery-reservation" diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.10.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.10.txt index ed7f9aed2559..ad3f0fa58e2d 100644 --- a/packages/google-cloud-bigquery-reservation/testing/constraints-3.10.txt +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.10.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.11.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.11.txt index ed7f9aed2559..ad3f0fa58e2d 100644 --- a/packages/google-cloud-bigquery-reservation/testing/constraints-3.11.txt +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.11.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.12.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.12.txt index ed7f9aed2559..ad3f0fa58e2d 100644 --- a/packages/google-cloud-bigquery-reservation/testing/constraints-3.12.txt +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.12.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.13.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.13.txt index c20a77817caa..2010e549cceb 100644 --- a/packages/google-cloud-bigquery-reservation/testing/constraints-3.13.txt +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.13.txt @@ -9,3 +9,4 @@ google-api-core>=2 google-auth>=2 proto-plus>=1 protobuf>=6 
+grpc-google-iam-v1>=0 diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.7.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.7.txt index a77f12bc13e4..56affbd9bd75 100644 --- a/packages/google-cloud-bigquery-reservation/testing/constraints-3.7.txt +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.7.txt @@ -8,3 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 +grpc-google-iam-v1==0.14.0 diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.8.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.8.txt index ed7f9aed2559..ad3f0fa58e2d 100644 --- a/packages/google-cloud-bigquery-reservation/testing/constraints-3.8.txt +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.8.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-reservation/testing/constraints-3.9.txt b/packages/google-cloud-bigquery-reservation/testing/constraints-3.9.txt index ed7f9aed2559..ad3f0fa58e2d 100644 --- a/packages/google-cloud-bigquery-reservation/testing/constraints-3.9.txt +++ b/packages/google-cloud-bigquery-reservation/testing/constraints-3.9.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py index b6079f81b9e9..aab1b11237bc 100644 --- a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -50,11 +50,15 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import 
MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore from google.cloud.bigquery_reservation_v1.services.reservation_service import ( ReservationServiceAsyncClient, @@ -1194,6 +1198,7 @@ def test_create_reservation(request_type, transport: str = "grpc"): original_primary_location="original_primary_location_value", max_slots=986, scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) response = client.create_reservation(request) @@ -1218,6 +1223,7 @@ def test_create_reservation(request_type, transport: str = "grpc"): assert ( response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY ) + assert response.reservation_group == "reservation_group_value" def test_create_reservation_non_empty_request_with_auto_populated_field(): @@ -1365,6 +1371,7 @@ async def test_create_reservation_async( original_primary_location="original_primary_location_value", max_slots=986, scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) ) response = await client.create_reservation(request) @@ -1390,6 +1397,7 @@ async def test_create_reservation_async( assert ( response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY ) + assert response.reservation_group == "reservation_group_value" @pytest.mark.asyncio @@ -2143,6 +2151,7 @@ def test_get_reservation(request_type, transport: str = "grpc"): original_primary_location="original_primary_location_value", max_slots=986, 
scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) response = client.get_reservation(request) @@ -2165,6 +2174,7 @@ def test_get_reservation(request_type, transport: str = "grpc"): assert response.original_primary_location == "original_primary_location_value" assert response.max_slots == 986 assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert response.reservation_group == "reservation_group_value" def test_get_reservation_non_empty_request_with_auto_populated_field(): @@ -2301,6 +2311,7 @@ async def test_get_reservation_async( original_primary_location="original_primary_location_value", max_slots=986, scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) ) response = await client.get_reservation(request) @@ -2324,6 +2335,7 @@ async def test_get_reservation_async( assert response.original_primary_location == "original_primary_location_value" assert response.max_slots == 986 assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert response.reservation_group == "reservation_group_value" @pytest.mark.asyncio @@ -2837,6 +2849,7 @@ def test_update_reservation(request_type, transport: str = "grpc"): original_primary_location="original_primary_location_value", max_slots=986, scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) response = client.update_reservation(request) @@ -2861,6 +2874,7 @@ def test_update_reservation(request_type, transport: str = "grpc"): assert ( response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY ) + assert response.reservation_group == "reservation_group_value" def test_update_reservation_non_empty_request_with_auto_populated_field(): @@ -3002,6 +3016,7 @@ async def test_update_reservation_async( original_primary_location="original_primary_location_value", 
max_slots=986, scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) ) response = await client.update_reservation(request) @@ -3027,6 +3042,7 @@ async def test_update_reservation_async( assert ( response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY ) + assert response.reservation_group == "reservation_group_value" @pytest.mark.asyncio @@ -3229,6 +3245,7 @@ def test_failover_reservation(request_type, transport: str = "grpc"): original_primary_location="original_primary_location_value", max_slots=986, scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) response = client.failover_reservation(request) @@ -3251,6 +3268,7 @@ def test_failover_reservation(request_type, transport: str = "grpc"): assert response.original_primary_location == "original_primary_location_value" assert response.max_slots == 986 assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert response.reservation_group == "reservation_group_value" def test_failover_reservation_non_empty_request_with_auto_populated_field(): @@ -3395,6 +3413,7 @@ async def test_failover_reservation_async( original_primary_location="original_primary_location_value", max_slots=986, scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) ) response = await client.failover_reservation(request) @@ -3418,6 +3437,7 @@ async def test_failover_reservation_async( assert response.original_primary_location == "original_primary_location_value" assert response.max_slots == 986 assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert response.reservation_group == "reservation_group_value" @pytest.mark.asyncio @@ -5913,6 +5933,7 @@ def test_merge_capacity_commitments_non_empty_request_with_auto_populated_field( # if they meet the requirements of AIP 4235. 
request = reservation.MergeCapacityCommitmentsRequest( parent="parent_value", + capacity_commitment_id="capacity_commitment_id_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5927,6 +5948,7 @@ def test_merge_capacity_commitments_non_empty_request_with_auto_populated_field( _, args, _ = call.mock_calls[0] assert args[0] == reservation.MergeCapacityCommitmentsRequest( parent="parent_value", + capacity_commitment_id="capacity_commitment_id_value", ) @@ -9971,13 +9993,79 @@ async def test_update_bi_reservation_flattened_error_async(): ) -def test_create_reservation_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.GetIamPolicyRequest( + resource="resource_value", + ) + + +def test_get_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -9985,188 +10073,339 @@ def test_create_reservation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_reservation in client._transport._wrapped_methods - ) + assert client._transport.get_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() 
mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_reservation - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc request = {} - client.create_reservation(request) + client.get_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_reservation(request) + client.get_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_reservation_rest_required_fields( - request_type=gcbr_reservation.CreateReservationRequest, +@pytest.mark.asyncio +async def test_get_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ReservationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_iam_policy + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_reservation._get_unset_required_fields(jsonified_request) - 
jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_iam_policy + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.get_iam_policy(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_reservation._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("reservation_id",)) - jsonified_request.update(unset_fields) + await client.get_iam_policy(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = gcbr_reservation.Reservation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.get_iam_policy(request) - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.GetIamPolicyRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" - response = client.create_reservation(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) -def test_create_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_get_iam_policy_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.create_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(("reservationId",)) & set(("parent",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource_value" -def test_create_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gcbr_reservation.Reservation() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.resource = "resource_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - reservation=gcbr_reservation.Reservation(name="name_value"), - reservation_id="reservation_id_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.get_iam_policy(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.create_reservation(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/reservations" - % client.transport._host, - args[1], + +def test_get_iam_policy_from_dict_foreign(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } ) + call.assert_called() -def test_create_reservation_rest_flattened_error(transport: str = "rest"): +def test_get_iam_policy_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_reservation( - gcbr_reservation.CreateReservationRequest(), - parent="parent_value", - reservation=gcbr_reservation.Reservation(name="name_value"), - reservation_id="reservation_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_iam_policy( + resource="resource_value", ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val -def test_list_reservations_rest_use_cached_wrapped_rpc(): + +def test_get_iam_policy_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_iam_policy_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.SetIamPolicyRequest( + resource="resource_value", + ) + + +def test_set_iam_policy_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10174,257 +10413,342 @@ def test_list_reservations_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_reservations in client._transport._wrapped_methods + assert client._transport.set_iam_policy in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.list_reservations - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc request = {} - client.list_reservations(request) + client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_reservations(request) + client.set_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_reservations_rest_required_fields( - request_type=reservation.ListReservationsRequest, +@pytest.mark.asyncio +async def test_set_iam_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_reservations._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify required fields with default values are now present + # Ensure method has been cached + assert ( + client._client._transport.set_iam_policy + in client._client._transport._wrapped_methods + ) - jsonified_request["parent"] = 
"parent_value" + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_iam_policy + ] = mock_rpc - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_reservations._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + request = {} + await client.set_iam_policy(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.set_iam_policy(request) - # Designate an appropriate value for the returned response. - return_value = reservation.ListReservationsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.ListReservationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.list_reservations(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.SetIamPolicyRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" -def test_list_reservations_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.list_reservations._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) -def test_list_reservations_rest_flattened(): +def test_set_iam_policy_field_headers(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reservation.ListReservationsResponse() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.resource = "resource_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.ListReservationsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.list_reservations(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + + request.resource = "resource_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict_foreign(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + "update_mask": field_mask_pb2.FieldMask(paths=["paths_value"]), + } + ) + call.assert_called() + + +def test_set_iam_policy_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.set_iam_policy( + resource="resource_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/reservations" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val -def test_list_reservations_rest_flattened_error(transport: str = "rest"): +def test_set_iam_policy_flattened_error(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_reservations( - reservation.ListReservationsRequest(), - parent="parent_value", + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", ) -def test_list_reservations_rest_pager(transport: str = "rest"): +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.set_iam_policy( + resource="resource_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource + mock_val = "resource_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_set_iam_policy_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions(request_type, transport: str = "grpc"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - reservation.Reservation(), - ], - next_page_token="abc", - ), - reservation.ListReservationsResponse( - reservations=[], - next_page_token="def", - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - ], - next_page_token="ghi", - ), - reservation.ListReservationsResponse( - reservations=[ - reservation.Reservation(), - reservation.Reservation(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # Wrap the values into proper Response objs - response = tuple( - reservation.ListReservationsResponse.to_json(x) for x in response + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + response = client.test_iam_permissions(request) - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request - pager = client.list_reservations(request=sample_request) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Reservation) for i in results) - pages = list(client.list_reservations(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_test_iam_permissions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + ) -def test_get_reservation_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.test_iam_permissions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == iam_policy_pb2.TestIamPermissionsRequest( + resource="resource_value", + ) + + +def test_test_iam_permissions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10432,179 +10756,276 @@ def test_get_reservation_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_reservation in client._transport._wrapped_methods + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.get_reservation] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc request = {} - client.get_reservation(request) + client.test_iam_permissions(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_reservation(request) + client.test_iam_permissions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_reservation_rest_required_fields( - request_type=reservation.GetReservationRequest, +@pytest.mark.asyncio +async def test_test_iam_permissions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ReservationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.test_iam_permissions + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + 
mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.test_iam_permissions + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.test_iam_permissions(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.test_iam_permissions(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = reservation.Reservation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = iam_policy_pb2.TestIamPermissionsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] - response = client.get_reservation(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) -def test_get_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_test_iam_permissions_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource_value" -def test_get_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Reservation() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/reservations/sample3" - } + request.resource = "resource_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() ) - mock_args.update(sample_request) + await client.test_iam_permissions(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.get_reservation(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource_value", + ) in kw["metadata"] - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/reservations/*}" - % client.transport._host, - args[1], + +def test_test_iam_permissions_from_dict_foreign(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } ) + call.assert_called() -def test_get_reservation_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + reservation.CreateReservationGroupRequest, + dict, + ], +) +def test_create_reservation_group(request_type, transport: str = "grpc"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_reservation( - reservation.GetReservationRequest(), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ReservationGroup( name="name_value", ) + response = client.create_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = reservation.CreateReservationGroupRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.ReservationGroup) + assert response.name == "name_value" -def test_delete_reservation_rest_use_cached_wrapped_rpc(): + +def test_create_reservation_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = reservation.CreateReservationGroupRequest( + parent="parent_value", + reservation_group_id="reservation_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_reservation_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.CreateReservationGroupRequest( + parent="parent_value", + reservation_group_id="reservation_group_id_value", + ) + + +def test_create_reservation_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10613,7 +11034,8 @@ def test_delete_reservation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.delete_reservation in client._transport._wrapped_methods + client._transport.create_reservation_group + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -10622,168 +11044,245 @@ def test_delete_reservation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_reservation + client._transport.create_reservation_group ] = mock_rpc - request = {} - client.delete_reservation(request) + client.create_reservation_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.delete_reservation(request) + client.create_reservation_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_reservation_rest_required_fields( - request_type=reservation.DeleteReservationRequest, +@pytest.mark.asyncio +async def test_create_reservation_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ReservationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.create_reservation_group + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_reservation_group + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.create_reservation_group(request) - jsonified_request["name"] = "name_value" + 
# Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.create_reservation_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_reservation_group_async( + transport: str = "grpc_asyncio", + request_type=reservation.CreateReservationGroupRequest, +): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ReservationGroup( + name="name_value", + ) + ) + response = await client.create_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = reservation.CreateReservationGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, reservation.ReservationGroup) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_create_reservation_group_async_from_dict(): + await test_create_reservation_group_async(request_type=dict) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +def test_create_reservation_group_field_headers(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.CreateReservationGroupRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = "" + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + call.return_value = reservation.ReservationGroup() + client.create_reservation_group(request) - response = client.delete_reservation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_delete_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_create_reservation_group_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.delete_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.CreateReservationGroupRequest() + request.parent = "parent_value" -def test_delete_reservation_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ReservationGroup() + ) + await client.create_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.GetReservationGroupRequest, + dict, + ], +) +def test_get_reservation_group(request_type, transport: str = "grpc"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/reservations/sample3" - } + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ReservationGroup( name="name_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_reservation_group(request) - client.delete_reservation(**mock_args) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = reservation.GetReservationGroupRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/reservations/*}" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.ReservationGroup) + assert response.name == "name_value" -def test_delete_reservation_rest_flattened_error(transport: str = "rest"): +def test_get_reservation_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_reservation( - reservation.DeleteReservationRequest(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = reservation.GetReservationGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_reservation_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.GetReservationGroupRequest( name="name_value", ) -def test_update_reservation_rest_use_cached_wrapped_rpc(): +def test_get_reservation_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10792,7 +11291,8 @@ def test_update_reservation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_reservation in client._transport._wrapped_methods + client._transport.get_reservation_group + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -10801,93 +11301,327 @@ def test_update_reservation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_reservation + client._transport.get_reservation_group ] = mock_rpc - request = {} - client.update_reservation(request) + client.get_reservation_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_reservation(request) + client.get_reservation_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_reservation_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gcbr_reservation.Reservation() +@pytest.mark.asyncio +async def test_get_reservation_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "reservation": { - "name": "projects/sample1/locations/sample2/reservations/sample3" - } - } + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # get truthy value for each flattened field - mock_args = dict( - reservation=gcbr_reservation.Reservation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Ensure method has been cached + assert ( + client._client._transport.get_reservation_group + in client._client._transport._wrapped_methods ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_reservation_group + ] = mock_rpc - 
client.update_reservation(**mock_args) + request = {} + await client.get_reservation_group(request) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{reservation.name=projects/*/locations/*/reservations/*}" - % client.transport._host, - args[1], + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_reservation_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_reservation_group_async( + transport: str = "grpc_asyncio", request_type=reservation.GetReservationGroupRequest +): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ReservationGroup( + name="name_value", + ) ) + response = await client.get_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = reservation.GetReservationGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, reservation.ReservationGroup) + assert response.name == "name_value" -def test_update_reservation_rest_flattened_error(transport: str = "rest"): +@pytest.mark.asyncio +async def test_get_reservation_group_async_from_dict(): + await test_get_reservation_group_async(request_type=dict) + + +def test_get_reservation_group_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.GetReservationGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + call.return_value = reservation.ReservationGroup() + client.get_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_reservation_group_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.GetReservationGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ReservationGroup() + ) + await client.get_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_reservation_group_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ReservationGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_reservation_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_reservation_group_flattened_error(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_reservation( - gcbr_reservation.UpdateReservationRequest(), - reservation=gcbr_reservation.Reservation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.get_reservation_group( + reservation.GetReservationGroupRequest(), + name="name_value", ) -def test_failover_reservation_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_get_reservation_group_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ReservationGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ReservationGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_reservation_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_reservation_group_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_reservation_group( + reservation.GetReservationGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.DeleteReservationGroupRequest, + dict, + ], +) +def test_delete_reservation_group(request_type, transport: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = reservation.DeleteReservationGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_reservation_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = reservation.DeleteReservationGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_reservation_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.DeleteReservationGroupRequest( + name="name_value", + ) + + +def test_delete_reservation_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -10896,7 +11630,8 @@ def test_failover_reservation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.failover_reservation in client._transport._wrapped_methods + client._transport.delete_reservation_group + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -10905,315 +11640,324 @@ def test_failover_reservation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.failover_reservation + client._transport.delete_reservation_group ] = mock_rpc - request = {} - client.failover_reservation(request) + client.delete_reservation_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.failover_reservation(request) + client.delete_reservation_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_failover_reservation_rest_required_fields( - request_type=reservation.FailoverReservationRequest, +@pytest.mark.asyncio +async def test_delete_reservation_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ReservationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.delete_reservation_group + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).failover_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_reservation_group + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.delete_reservation_group(request) - jsonified_request["name"] = 
"name_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).failover_reservation._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + await client.delete_reservation_group(request) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_delete_reservation_group_async( + transport: str = "grpc_asyncio", + request_type=reservation.DeleteReservationGroupRequest, +): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = reservation.Reservation() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_reservation_group(request) - # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = reservation.DeleteReservationGroupRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert response is None - response = client.failover_reservation(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_delete_reservation_group_async_from_dict(): + await test_delete_reservation_group_async(request_type=dict) -def test_failover_reservation_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_delete_reservation_group_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.failover_reservation._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_create_capacity_commitment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.create_capacity_commitment - in client._transport._wrapped_methods - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.DeleteReservationGroupRequest() - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.create_capacity_commitment - ] = mock_rpc + request.name = "name_value" - request = {} - client.create_capacity_commitment(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + call.return_value = None + client.delete_reservation_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_create_capacity_commitment_rest_required_fields( - request_type=reservation.CreateCapacityCommitmentRequest, -): - transport_class = transports.ReservationServiceRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +@pytest.mark.asyncio +async def test_delete_reservation_group_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - # verify fields with default values are dropped + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = reservation.DeleteReservationGroupRequest() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_capacity_commitment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request.name = "name_value" - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_reservation_group(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_capacity_commitment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "capacity_commitment_id", - "enforce_single_admin_project_per_org", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +def test_delete_reservation_group_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_reservation_group( + name="name_value", + ) - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_capacity_commitment(request) +def test_delete_reservation_group_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_reservation_group( + reservation.DeleteReservationGroupRequest(), + name="name_value", + ) -def test_create_capacity_commitment_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_reservation_group_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_capacity_commitment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "capacityCommitmentId", - "enforceSingleAdminProjectPerOrg", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.delete_reservation_group( + name="name_value", ) - & set(("parent",)) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_reservation_group_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_reservation_group( + reservation.DeleteReservationGroupRequest(), + name="name_value", + ) + -def test_create_capacity_commitment_rest_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + reservation.ListReservationGroupsRequest, + dict, + ], +) +def test_list_reservation_groups(request_type, transport: str = "grpc"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - capacity_commitment=reservation.CapacityCommitment(name="name_value"), + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ListReservationGroupsResponse( + next_page_token="next_page_token_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_reservation_groups(request) - client.create_capacity_commitment(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = reservation.ListReservationGroupsRequest() + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/capacityCommitments" - % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListReservationGroupsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_capacity_commitment_rest_flattened_error(transport: str = "rest"): +def test_list_reservation_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="grpc", ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_capacity_commitment( - reservation.CreateCapacityCommitmentRequest(), + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = reservation.ListReservationGroupsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_reservation_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == reservation.ListReservationGroupsRequest( parent="parent_value", - capacity_commitment=reservation.CapacityCommitment(name="name_value"), + page_token="page_token_value", ) -def test_list_capacity_commitments_rest_use_cached_wrapped_rpc(): +def test_list_reservation_groups_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11222,7 +11966,7 @@ def test_list_capacity_commitments_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_capacity_commitments + client._transport.list_reservation_groups in client._transport._wrapped_methods ) @@ -11232,290 
+11976,505 @@ def test_list_capacity_commitments_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_capacity_commitments + client._transport.list_reservation_groups ] = mock_rpc - request = {} - client.list_capacity_commitments(request) + client.list_reservation_groups(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_capacity_commitments(request) + client.list_reservation_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_capacity_commitments_rest_required_fields( - request_type=reservation.ListCapacityCommitmentsRequest, +@pytest.mark.asyncio +async def test_list_reservation_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ReservationServiceRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_reservation_groups + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - 
).list_capacity_commitments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_reservation_groups + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.list_reservation_groups(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_capacity_commitments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) + await client.list_reservation_groups(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + +@pytest.mark.asyncio +async def test_list_reservation_groups_async( + transport: str = "grpc_asyncio", + request_type=reservation.ListReservationGroupsRequest, +): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = reservation.ListCapacityCommitmentsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_reservation_groups(request) - # Convert return value to protobuf type - return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = reservation.ListReservationGroupsRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListReservationGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" - response = client.list_capacity_commitments(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_list_reservation_groups_async_from_dict(): + await test_list_reservation_groups_async(request_type=dict) -def test_list_capacity_commitments_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_reservation_groups_field_headers(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.list_capacity_commitments._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.ListReservationGroupsRequest() + request.parent = "parent_value" -def test_list_capacity_commitments_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + call.return_value = reservation.ListReservationGroupsResponse() + client.list_reservation_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_reservation_groups_field_headers_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = reservation.ListReservationGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationGroupsResponse() + ) + await client.list_reservation_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_reservation_groups_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reservation.ListCapacityCommitmentsResponse() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = reservation.ListReservationGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_reservation_groups( + parent="parent_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_list_reservation_groups_flattened_error(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_reservation_groups( + reservation.ListReservationGroupsRequest(), parent="parent_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_capacity_commitments(**mock_args) +@pytest.mark.asyncio +async def test_list_reservation_groups_flattened_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = reservation.ListReservationGroupsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_reservation_groups( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/capacityCommitments" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_list_capacity_commitments_rest_flattened_error(transport: str = "rest"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_list_reservation_groups_flattened_error_async(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_capacity_commitments( - reservation.ListCapacityCommitmentsRequest(), + await client.list_reservation_groups( + reservation.ListReservationGroupsRequest(), parent="parent_value", ) -def test_list_capacity_commitments_rest_pager(transport: str = "rest"): +def test_list_reservation_groups_pager(transport_name: str = "grpc"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + reservation.ReservationGroup(), ], next_page_token="abc", ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[], + reservation.ListReservationGroupsResponse( + reservation_groups=[], next_page_token="def", ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), ], next_page_token="ghi", ), - reservation.ListCapacityCommitmentsResponse( - capacity_commitments=[ - reservation.CapacityCommitment(), - reservation.CapacityCommitment(), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple( - reservation.ListCapacityCommitmentsResponse.to_json(x) for x in response + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = 
response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + pager = client.list_reservation_groups(request={}, retry=retry, timeout=timeout) - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_capacity_commitments(request=sample_request) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 - assert all(isinstance(i, reservation.CapacityCommitment) for i in results) + assert all(isinstance(i, reservation.ReservationGroup) for i in results) - pages = list(client.list_capacity_commitments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_list_reservation_groups_pages(transport_name: str = "grpc"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) -def test_get_capacity_commitment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + next_page_token="abc", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[], + next_page_token="def", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + ], + next_page_token="ghi", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + ), + RuntimeError, ) + pages = list(client.list_reservation_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert ( - client._transport.get_capacity_commitment - in client._transport._wrapped_methods - ) +@pytest.mark.asyncio +async def test_list_reservation_groups_async_pager(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + next_page_token="abc", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[], + next_page_token="def", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + ], + next_page_token="ghi", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + ), + RuntimeError, ) - client._transport._wrapped_methods[ - client._transport.get_capacity_commitment - ] = mock_rpc + async_pager = await client.list_reservation_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - request = {} - client.get_capacity_commitment(request) + assert len(responses) == 6 + assert all(isinstance(i, reservation.ReservationGroup) for i in responses) + + +@pytest.mark.asyncio +async def test_list_reservation_groups_async_pages(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + next_page_token="abc", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[], + next_page_token="def", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + ], + next_page_token="ghi", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_reservation_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_reservation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_reservation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_reservation + ] = mock_rpc + + request = {} + client.create_reservation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_capacity_commitment(request) + client.create_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_capacity_commitment_rest_required_fields( - request_type=reservation.GetCapacityCommitmentRequest, +def test_create_reservation_rest_required_fields( + request_type=gcbr_reservation.CreateReservationRequest, ): transport_class = transports.ReservationServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11526,21 +12485,23 @@ def test_get_capacity_commitment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_capacity_commitment._get_unset_required_fields(jsonified_request) + ).create_reservation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_capacity_commitment._get_unset_required_fields(jsonified_request) + ).create_reservation._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("reservation_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11549,7 +12510,7 @@ def test_get_capacity_commitment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() + return_value = gcbr_reservation.Reservation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11561,39 +12522,40 @@ def test_get_capacity_commitment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) + return_value = gcbr_reservation.Reservation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_capacity_commitment(request) + response = client.create_reservation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_capacity_commitment_rest_unset_required_fields(): +def 
test_create_reservation_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_capacity_commitment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.create_reservation._get_unset_required_fields({}) + assert set(unset_fields) == (set(("reservationId",)) & set(("parent",))) -def test_get_capacity_commitment_rest_flattened(): +def test_create_reservation_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11602,16 +12564,16 @@ def test_get_capacity_commitment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() + return_value = gcbr_reservation.Reservation() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", ) mock_args.update(sample_request) @@ -11619,26 +12581,26 @@ def test_get_capacity_commitment_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) + return_value = gcbr_reservation.Reservation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = 
{"header-1": "value-1", "header-2": "value-2"} - client.get_capacity_commitment(**mock_args) + client.create_reservation(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" + "%s/v1/{parent=projects/*/locations/*}/reservations" % client.transport._host, args[1], ) -def test_get_capacity_commitment_rest_flattened_error(transport: str = "rest"): +def test_create_reservation_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11647,13 +12609,15 @@ def test_get_capacity_commitment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_capacity_commitment( - reservation.GetCapacityCommitmentRequest(), - name="name_value", + client.create_reservation( + gcbr_reservation.CreateReservationRequest(), + parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + reservation_id="reservation_id_value", ) -def test_delete_capacity_commitment_rest_use_cached_wrapped_rpc(): +def test_list_reservations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11667,10 +12631,7 @@ def test_delete_capacity_commitment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_capacity_commitment - in client._transport._wrapped_methods - ) + assert client._transport.list_reservations in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() 
@@ -11678,29 +12639,29 @@ def test_delete_capacity_commitment_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_capacity_commitment + client._transport.list_reservations ] = mock_rpc request = {} - client.delete_capacity_commitment(request) + client.list_reservations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_capacity_commitment(request) + client.list_reservations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_capacity_commitment_rest_required_fields( - request_type=reservation.DeleteCapacityCommitmentRequest, +def test_list_reservations_rest_required_fields( + request_type=reservation.ListReservationsRequest, ): transport_class = transports.ReservationServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11711,23 +12672,28 @@ def test_delete_capacity_commitment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_capacity_commitment._get_unset_required_fields(jsonified_request) + ).list_reservations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_capacity_commitment._get_unset_required_fields(jsonified_request) + ).list_reservations._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("force",)) + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11736,7 +12702,7 @@ def test_delete_capacity_commitment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = reservation.ListReservationsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11748,36 +12714,47 @@ def test_delete_capacity_commitment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = reservation.ListReservationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_capacity_commitment(request) + response = client.list_reservations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_capacity_commitment_rest_unset_required_fields(): +def 
test_list_reservations_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_capacity_commitment._get_unset_required_fields({}) - assert set(unset_fields) == (set(("force",)) & set(("name",))) + unset_fields = transport.list_reservations._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_delete_capacity_commitment_rest_flattened(): +def test_list_reservations_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11786,41 +12763,41 @@ def test_delete_capacity_commitment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = reservation.ListReservationsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = reservation.ListReservationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_capacity_commitment(**mock_args) + client.list_reservations(**mock_args) # Establish 
that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" + "%s/v1/{parent=projects/*/locations/*}/reservations" % client.transport._host, args[1], ) -def test_delete_capacity_commitment_rest_flattened_error(transport: str = "rest"): +def test_list_reservations_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -11829,118 +12806,76 @@ def test_delete_capacity_commitment_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_capacity_commitment( - reservation.DeleteCapacityCommitmentRequest(), - name="name_value", - ) - - -def test_update_capacity_commitment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.update_capacity_commitment - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_reservations( + reservation.ListReservationsRequest(), + parent="parent_value", ) - client._transport._wrapped_methods[ - client._transport.update_capacity_commitment - ] = mock_rpc - - request = {} - client.update_capacity_commitment(request) - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - client.update_capacity_commitment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_update_capacity_commitment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_list_reservations_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "capacity_commitment": { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } - } - - # get truthy value for each flattened field - mock_args = dict( - capacity_commitment=reservation.CapacityCommitment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + reservation.Reservation(), + ], + next_page_token="abc", + ), + reservation.ListReservationsResponse( + reservations=[], + next_page_token="def", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + ], + next_page_token="ghi", + ), + reservation.ListReservationsResponse( + reservations=[ + reservation.Reservation(), + reservation.Reservation(), + ], + ), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.update_capacity_commitment(**mock_args) + # Two responses for two calls + response = response + response - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{capacity_commitment.name=projects/*/locations/*/capacityCommitments/*}" - % client.transport._host, - args[1], + # Wrap the values into proper Response objs + response = tuple( + reservation.ListReservationsResponse.to_json(x) for x in response ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -def test_update_capacity_commitment_rest_flattened_error(transport: str = "rest"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + pager = client.list_reservations(request=sample_request) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_capacity_commitment( - reservation.UpdateCapacityCommitmentRequest(), - capacity_commitment=reservation.CapacityCommitment(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Reservation) for i in results) + pages = list(client.list_reservations(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_split_capacity_commitment_rest_use_cached_wrapped_rpc(): + +def test_get_reservation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11954,35 +12889,30 @@ def test_split_capacity_commitment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.split_capacity_commitment - in client._transport._wrapped_methods - ) + assert client._transport.get_reservation in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.split_capacity_commitment - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_reservation] = mock_rpc request = {} - client.split_capacity_commitment(request) + client.get_reservation(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.split_capacity_commitment(request) + client.get_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_split_capacity_commitment_rest_required_fields( - request_type=reservation.SplitCapacityCommitmentRequest, +def test_get_reservation_rest_required_fields( + request_type=reservation.GetReservationRequest, ): transport_class = transports.ReservationServiceRestTransport @@ -11998,7 +12928,7 @@ def test_split_capacity_commitment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).split_capacity_commitment._get_unset_required_fields(jsonified_request) + ).get_reservation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12007,7 +12937,7 @@ def test_split_capacity_commitment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).split_capacity_commitment._get_unset_required_fields(jsonified_request) + ).get_reservation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12021,7 +12951,7 @@ def test_split_capacity_commitment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reservation.SplitCapacityCommitmentResponse() + return_value = reservation.Reservation() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12033,40 +12963,39 @@ def test_split_capacity_commitment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) + return_value = reservation.Reservation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.split_capacity_commitment(request) + response = client.get_reservation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_split_capacity_commitment_rest_unset_required_fields(): +def test_get_reservation_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.split_capacity_commitment._get_unset_required_fields({}) + unset_fields = transport.get_reservation._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_split_capacity_commitment_rest_flattened(): +def test_get_reservation_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12075,17 +13004,16 @@ def test_split_capacity_commitment_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.SplitCapacityCommitmentResponse() + return_value = reservation.Reservation() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + "name": "projects/sample1/locations/sample2/reservations/sample3" } # get truthy value for each flattened field mock_args = dict( name="name_value", - slot_count=1098, ) mock_args.update(sample_request) @@ -12093,26 +13021,26 @@ def test_split_capacity_commitment_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) + return_value = reservation.Reservation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.split_capacity_commitment(**mock_args) + client.get_reservation(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}:split" + "%s/v1/{name=projects/*/locations/*/reservations/*}" % client.transport._host, args[1], ) -def test_split_capacity_commitment_rest_flattened_error(transport: str = "rest"): +def test_get_reservation_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12121,14 +13049,13 @@ def test_split_capacity_commitment_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.split_capacity_commitment( - reservation.SplitCapacityCommitmentRequest(), + client.get_reservation( + reservation.GetReservationRequest(), name="name_value", - slot_count=1098, ) -def test_merge_capacity_commitments_rest_use_cached_wrapped_rpc(): +def test_delete_reservation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12143,8 +13070,7 @@ def test_merge_capacity_commitments_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.merge_capacity_commitments - in client._transport._wrapped_methods + client._transport.delete_reservation in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12153,163 +13079,63 @@ def test_merge_capacity_commitments_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.merge_capacity_commitments + client._transport.delete_reservation ] = mock_rpc request = {} - client.merge_capacity_commitments(request) + client.delete_reservation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.merge_capacity_commitments(request) + client.delete_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_merge_capacity_commitments_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_delete_reservation_rest_required_fields( + request_type=reservation.DeleteReservationRequest, +): + transport_class = transports.ReservationServiceRestTransport - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = reservation.CapacityCommitment() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # verify fields with default values are dropped - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - capacity_commitment_ids=["capacity_commitment_ids_value"], - ) - mock_args.update(sample_request) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # verify required fields with default values are now present - client.merge_capacity_commitments(**mock_args) + jsonified_request["name"] = "name_value" - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/capacityCommitments:merge" - % client.transport._host, - args[1], - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_merge_capacity_commitments_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.merge_capacity_commitments( - reservation.MergeCapacityCommitmentsRequest(), - parent="parent_value", - capacity_commitment_ids=["capacity_commitment_ids_value"], - ) - - -def test_create_assignment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_assignment in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client._transport._wrapped_methods[ - client._transport.create_assignment - ] = mock_rpc - - request = {} - client.create_assignment(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.create_assignment(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_create_assignment_rest_required_fields( - request_type=reservation.CreateAssignmentRequest, -): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_assignment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_assignment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("assignment_id",)) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = reservation.Assignment() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12321,40 +13147,36 @@ def test_create_assignment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_assignment(request) + response = client.delete_reservation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_assignment_rest_unset_required_fields(): +def test_delete_reservation_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_assignment._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assignmentId",)) & set(("parent",))) + unset_fields = transport.delete_reservation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_create_assignment_rest_flattened(): +def test_delete_reservation_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12363,44 +13185,41 @@ def test_create_assignment_rest_flattened(): # Mock 
the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.Assignment() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/reservations/sample3" + "name": "projects/sample1/locations/sample2/reservations/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - assignment=reservation.Assignment(name="name_value"), + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_assignment(**mock_args) + client.delete_reservation(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" + "%s/v1/{name=projects/*/locations/*/reservations/*}" % client.transport._host, args[1], ) -def test_create_assignment_rest_flattened_error(transport: str = "rest"): +def test_delete_reservation_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12409,14 +13228,13 @@ def test_create_assignment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_assignment( - reservation.CreateAssignmentRequest(), - parent="parent_value", - assignment=reservation.Assignment(name="name_value"), + client.delete_reservation( + reservation.DeleteReservationRequest(), + name="name_value", ) -def test_list_assignments_rest_use_cached_wrapped_rpc(): +def test_update_reservation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12430,7 +13248,9 @@ def test_list_assignments_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_assignments in client._transport._wrapped_methods + assert ( + client._transport.update_reservation in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12438,122 +13258,23 @@ def test_list_assignments_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_assignments + client._transport.update_reservation ] = mock_rpc request = {} - client.list_assignments(request) + client.update_reservation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_assignments(request) + client.update_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_assignments_rest_required_fields( - request_type=reservation.ListAssignmentsRequest, -): - transport_class = transports.ReservationServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_assignments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_assignments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = reservation.ListAssignmentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.ListAssignmentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_assignments(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_assignments_rest_unset_required_fields(): - transport = transports.ReservationServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.list_assignments._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) - - -def test_list_assignments_rest_flattened(): +def test_update_reservation_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12562,16 +13283,19 @@ def test_list_assignments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.ListAssignmentsResponse() + return_value = gcbr_reservation.Reservation() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/reservations/sample3" + "reservation": { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -12579,26 +13303,26 @@ def test_list_assignments_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.ListAssignmentsResponse.pb(return_value) + return_value = gcbr_reservation.Reservation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_assignments(**mock_args) + client.update_reservation(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" + "%s/v1/{reservation.name=projects/*/locations/*/reservations/*}" % client.transport._host, args[1], ) -def test_list_assignments_rest_flattened_error(transport: str = "rest"): +def test_update_reservation_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12607,78 +13331,14 @@ def test_list_assignments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_assignments( - reservation.ListAssignmentsRequest(), - parent="parent_value", + client.update_reservation( + gcbr_reservation.UpdateReservationRequest(), + reservation=gcbr_reservation.Reservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_list_assignments_rest_pager(transport: str = "rest"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token="abc", - ), - reservation.ListAssignmentsResponse( - assignments=[], - next_page_token="def", - ), - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token="ghi", - ), - reservation.ListAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - reservation.ListAssignmentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/reservations/sample3" - } - - pager = client.list_assignments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Assignment) for i in results) - - pages = list(client.list_assignments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_delete_assignment_rest_use_cached_wrapped_rpc(): +def test_failover_reservation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12692,7 +13352,9 @@ def test_delete_assignment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert 
client._transport.delete_assignment in client._transport._wrapped_methods + assert ( + client._transport.failover_reservation in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12700,24 +13362,24 @@ def test_delete_assignment_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.delete_assignment + client._transport.failover_reservation ] = mock_rpc request = {} - client.delete_assignment(request) + client.failover_reservation(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_assignment(request) + client.failover_reservation(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_assignment_rest_required_fields( - request_type=reservation.DeleteAssignmentRequest, +def test_failover_reservation_rest_required_fields( + request_type=reservation.FailoverReservationRequest, ): transport_class = transports.ReservationServiceRestTransport @@ -12733,7 +13395,7 @@ def test_delete_assignment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_assignment._get_unset_required_fields(jsonified_request) + ).failover_reservation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12742,7 +13404,7 @@ def test_delete_assignment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_assignment._get_unset_required_fields(jsonified_request) + ).failover_reservation._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12756,7 +13418,7 @@ def 
test_delete_assignment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = reservation.Reservation() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12768,94 +13430,40 @@ def test_delete_assignment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_assignment(request) + response = client.failover_reservation(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_assignment_rest_unset_required_fields(): +def test_failover_reservation_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_assignment._get_unset_required_fields({}) + unset_fields = transport.failover_reservation._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_assignment_rest_flattened(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within 
the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_assignment(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}" - % client.transport._host, - args[1], - ) - - -def test_delete_assignment_rest_flattened_error(transport: str = "rest"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_assignment( - reservation.DeleteAssignmentRequest(), - name="name_value", - ) - - -def test_search_assignments_rest_use_cached_wrapped_rpc(): +def test_create_capacity_commitment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12870,7 +13478,8 @@ def test_search_assignments_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.search_assignments in client._transport._wrapped_methods + client._transport.create_capacity_commitment + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -12879,24 +13488,24 @@ def test_search_assignments_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.search_assignments + client._transport.create_capacity_commitment ] = mock_rpc request = {} - client.search_assignments(request) + client.create_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.search_assignments(request) + client.create_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_search_assignments_rest_required_fields( - request_type=reservation.SearchAssignmentsRequest, +def test_create_capacity_commitment_rest_required_fields( + request_type=reservation.CreateCapacityCommitmentRequest, ): transport_class = transports.ReservationServiceRestTransport @@ -12912,7 +13521,7 @@ def test_search_assignments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_assignments._get_unset_required_fields(jsonified_request) + ).create_capacity_commitment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12921,13 +13530,12 @@ def test_search_assignments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_assignments._get_unset_required_fields(jsonified_request) + ).create_capacity_commitment._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "page_size", - "page_token", - "query", + "capacity_commitment_id", + "enforce_single_admin_project_per_org", ) ) jsonified_request.update(unset_fields) @@ -12943,7 +13551,7 @@ def test_search_assignments_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reservation.SearchAssignmentsResponse() + return_value = reservation.CapacityCommitment() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12955,48 +13563,48 @@ def test_search_assignments_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.SearchAssignmentsResponse.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_assignments(request) + response = client.create_capacity_commitment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_search_assignments_rest_unset_required_fields(): +def test_create_capacity_commitment_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.search_assignments._get_unset_required_fields({}) + unset_fields = transport.create_capacity_commitment._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "pageSize", - "pageToken", - "query", + "capacityCommitmentId", + "enforceSingleAdminProjectPerOrg", ) ) & set(("parent",)) ) -def test_search_assignments_rest_flattened(): +def test_create_capacity_commitment_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13005,7 +13613,7 @@ def test_search_assignments_rest_flattened(): # Mock the http request 
call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.SearchAssignmentsResponse() + return_value = reservation.CapacityCommitment() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -13013,7 +13621,7 @@ def test_search_assignments_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - query="query_value", + capacity_commitment=reservation.CapacityCommitment(name="name_value"), ) mock_args.update(sample_request) @@ -13021,26 +13629,26 @@ def test_search_assignments_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.SearchAssignmentsResponse.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_assignments(**mock_args) + client.create_capacity_commitment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}:searchAssignments" + "%s/v1/{parent=projects/*/locations/*}/capacityCommitments" % client.transport._host, args[1], ) -def test_search_assignments_rest_flattened_error(transport: str = "rest"): +def test_create_capacity_commitment_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13049,84 +13657,21 @@ def test_search_assignments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.search_assignments( - reservation.SearchAssignmentsRequest(), + client.create_capacity_commitment( + reservation.CreateCapacityCommitmentRequest(), parent="parent_value", - query="query_value", + capacity_commitment=reservation.CapacityCommitment(name="name_value"), ) -def test_search_assignments_rest_pager(transport: str = "rest"): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), - ], - next_page_token="abc", - ), - reservation.SearchAssignmentsResponse( - assignments=[], - next_page_token="def", - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - ], - next_page_token="ghi", - ), - reservation.SearchAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - reservation.SearchAssignmentsResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.search_assignments(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, reservation.Assignment) for i in results) - - pages = list(client.search_assignments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_search_all_assignments_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_list_capacity_commitments_rest_use_cached_wrapped_rpc(): + # Clients should use 
_prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -13134,7 +13679,7 @@ def test_search_all_assignments_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.search_all_assignments + client._transport.list_capacity_commitments in client._transport._wrapped_methods ) @@ -13144,24 +13689,24 @@ def test_search_all_assignments_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.search_all_assignments + client._transport.list_capacity_commitments ] = mock_rpc request = {} - client.search_all_assignments(request) + client.list_capacity_commitments(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.search_all_assignments(request) + client.list_capacity_commitments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_search_all_assignments_rest_required_fields( - request_type=reservation.SearchAllAssignmentsRequest, +def test_list_capacity_commitments_rest_required_fields( + request_type=reservation.ListCapacityCommitmentsRequest, ): transport_class = transports.ReservationServiceRestTransport @@ -13177,7 +13722,7 @@ def test_search_all_assignments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_all_assignments._get_unset_required_fields(jsonified_request) + ).list_capacity_commitments._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13186,13 +13731,12 @@ def test_search_all_assignments_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_all_assignments._get_unset_required_fields(jsonified_request) + ).list_capacity_commitments._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( "page_size", "page_token", - "query", ) ) jsonified_request.update(unset_fields) @@ -13208,7 +13752,7 @@ def test_search_all_assignments_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reservation.SearchAllAssignmentsResponse() + return_value = reservation.ListCapacityCommitmentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13229,39 +13773,38 @@ def test_search_all_assignments_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) + return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_all_assignments(request) + response = client.list_capacity_commitments(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_search_all_assignments_rest_unset_required_fields(): +def test_list_capacity_commitments_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.search_all_assignments._get_unset_required_fields({}) + unset_fields = transport.list_capacity_commitments._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( "pageSize", "pageToken", - "query", ) ) & set(("parent",)) ) -def test_search_all_assignments_rest_flattened(): +def test_list_capacity_commitments_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13270,7 +13813,7 @@ def test_search_all_assignments_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.SearchAllAssignmentsResponse() + return_value = reservation.ListCapacityCommitmentsResponse() # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -13278,7 +13821,6 @@ def test_search_all_assignments_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", - query="query_value", ) mock_args.update(sample_request) @@ -13286,26 +13828,26 @@ def test_search_all_assignments_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) + return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_all_assignments(**mock_args) + client.list_capacity_commitments(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}:searchAllAssignments" + "%s/v1/{parent=projects/*/locations/*}/capacityCommitments" % client.transport._host, args[1], ) -def test_search_all_assignments_rest_flattened_error(transport: str = "rest"): +def test_list_capacity_commitments_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13314,14 +13856,13 @@ def test_search_all_assignments_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.search_all_assignments( - reservation.SearchAllAssignmentsRequest(), + client.list_capacity_commitments( + reservation.ListCapacityCommitmentsRequest(), parent="parent_value", - query="query_value", ) -def test_search_all_assignments_rest_pager(transport: str = "rest"): +def test_list_capacity_commitments_rest_pager(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13333,28 +13874,28 @@ def test_search_all_assignments_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), - reservation.Assignment(), + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[ + reservation.CapacityCommitment(), + reservation.CapacityCommitment(), + reservation.CapacityCommitment(), ], next_page_token="abc", ), - reservation.SearchAllAssignmentsResponse( - assignments=[], + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[], next_page_token="def", ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[ + reservation.CapacityCommitment(), ], next_page_token="ghi", ), - reservation.SearchAllAssignmentsResponse( - assignments=[ - reservation.Assignment(), - reservation.Assignment(), + reservation.ListCapacityCommitmentsResponse( + capacity_commitments=[ + reservation.CapacityCommitment(), + reservation.CapacityCommitment(), ], ), ) @@ -13363,7 +13904,7 @@ def test_search_all_assignments_rest_pager(transport: str = "rest"): # Wrap the values into proper Response objs response = tuple( - reservation.SearchAllAssignmentsResponse.to_json(x) for x in response + reservation.ListCapacityCommitmentsResponse.to_json(x) 
for x in response ) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): @@ -13373,18 +13914,18 @@ def test_search_all_assignments_rest_pager(transport: str = "rest"): sample_request = {"parent": "projects/sample1/locations/sample2"} - pager = client.search_all_assignments(request=sample_request) + pager = client.list_capacity_commitments(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, reservation.Assignment) for i in results) + assert all(isinstance(i, reservation.CapacityCommitment) for i in results) - pages = list(client.search_all_assignments(request=sample_request).pages) + pages = list(client.list_capacity_commitments(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_move_assignment_rest_use_cached_wrapped_rpc(): +def test_get_capacity_commitment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13398,30 +13939,35 @@ def test_move_assignment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.move_assignment in client._transport._wrapped_methods + assert ( + client._transport.get_capacity_commitment + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.move_assignment] = mock_rpc + client._transport._wrapped_methods[ + client._transport.get_capacity_commitment + ] = mock_rpc request = {} - client.move_assignment(request) + client.get_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.move_assignment(request) + client.get_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_move_assignment_rest_required_fields( - request_type=reservation.MoveAssignmentRequest, +def test_get_capacity_commitment_rest_required_fields( + request_type=reservation.GetCapacityCommitmentRequest, ): transport_class = transports.ReservationServiceRestTransport @@ -13437,7 +13983,7 @@ def test_move_assignment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).move_assignment._get_unset_required_fields(jsonified_request) + ).get_capacity_commitment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13446,7 +13992,7 @@ def test_move_assignment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).move_assignment._get_unset_required_fields(jsonified_request) + ).get_capacity_commitment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13460,7 +14006,7 @@ def test_move_assignment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reservation.Assignment() + return_value = reservation.CapacityCommitment() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13472,40 +14018,39 @@ def test_move_assignment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.move_assignment(request) + response = client.get_capacity_commitment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_move_assignment_rest_unset_required_fields(): +def test_get_capacity_commitment_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.move_assignment._get_unset_required_fields({}) + unset_fields = transport.get_capacity_commitment._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_move_assignment_rest_flattened(): +def test_get_capacity_commitment_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13514,17 +14059,16 @@ def test_move_assignment_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.Assignment() + return_value = reservation.CapacityCommitment() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" } # get truthy value for each flattened field mock_args = dict( name="name_value", - destination_id="destination_id_value", ) mock_args.update(sample_request) @@ -13532,26 +14076,26 @@ def test_move_assignment_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.move_assignment(**mock_args) + client.get_capacity_commitment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}:move" + "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" % client.transport._host, args[1], ) -def test_move_assignment_rest_flattened_error(transport: str = "rest"): +def test_get_capacity_commitment_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13560,14 +14104,13 @@ def test_move_assignment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.move_assignment( - reservation.MoveAssignmentRequest(), + client.get_capacity_commitment( + reservation.GetCapacityCommitmentRequest(), name="name_value", - destination_id="destination_id_value", ) -def test_update_assignment_rest_use_cached_wrapped_rpc(): +def test_delete_capacity_commitment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13581,7 +14124,10 @@ def test_update_assignment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_assignment in client._transport._wrapped_methods + assert ( + client._transport.delete_capacity_commitment + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -13589,43 +14135,225 @@ def test_update_assignment_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.update_assignment + client._transport.delete_capacity_commitment ] = mock_rpc request = {} - client.update_assignment(request) + client.delete_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.update_assignment(request) + client.delete_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_assignment_rest_flattened(): +def test_delete_capacity_commitment_rest_required_fields( + request_type=reservation.DeleteCapacityCommitmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_capacity_commitment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_capacity_commitment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = None # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = reservation.Assignment() - - # get arguments that satisfy an http rule for this method - sample_request = { - "assignment": { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, } - } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_capacity_commitment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_capacity_commitment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_capacity_commitment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +def test_delete_capacity_commitment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } # get truthy value for each flattened field mock_args = dict( - assignment=reservation.Assignment(name="name_value"), + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_capacity_commitment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_capacity_commitment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_capacity_commitment( + reservation.DeleteCapacityCommitmentRequest(), + name="name_value", + ) + + +def test_update_capacity_commitment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_capacity_commitment + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_capacity_commitment + ] = mock_rpc + + request = {} + client.update_capacity_commitment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_capacity_commitment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_capacity_commitment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.CapacityCommitment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "capacity_commitment": { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + capacity_commitment=reservation.CapacityCommitment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -13634,26 +14362,26 @@ def test_update_assignment_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_assignment(**mock_args) + client.update_capacity_commitment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{assignment.name=projects/*/locations/*/reservations/*/assignments/*}" + "%s/v1/{capacity_commitment.name=projects/*/locations/*/capacityCommitments/*}" % client.transport._host, args[1], ) -def test_update_assignment_rest_flattened_error(transport: str = "rest"): +def test_update_capacity_commitment_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13662,14 +14390,14 @@ def test_update_assignment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_assignment( - reservation.UpdateAssignmentRequest(), - assignment=reservation.Assignment(name="name_value"), + client.update_capacity_commitment( + reservation.UpdateCapacityCommitmentRequest(), + capacity_commitment=reservation.CapacityCommitment(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_get_bi_reservation_rest_use_cached_wrapped_rpc(): +def test_split_capacity_commitment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13684,7 +14412,8 @@ def test_get_bi_reservation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_bi_reservation in client._transport._wrapped_methods + client._transport.split_capacity_commitment + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13693,24 +14422,24 @@ def test_get_bi_reservation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.get_bi_reservation + client._transport.split_capacity_commitment ] = mock_rpc request = {} - client.get_bi_reservation(request) + client.split_capacity_commitment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_bi_reservation(request) + client.split_capacity_commitment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_bi_reservation_rest_required_fields( - request_type=reservation.GetBiReservationRequest, +def test_split_capacity_commitment_rest_required_fields( + request_type=reservation.SplitCapacityCommitmentRequest, ): transport_class = transports.ReservationServiceRestTransport @@ -13726,7 +14455,7 @@ def test_get_bi_reservation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_bi_reservation._get_unset_required_fields(jsonified_request) + ).split_capacity_commitment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13735,7 +14464,7 @@ def test_get_bi_reservation_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_bi_reservation._get_unset_required_fields(jsonified_request) + ).split_capacity_commitment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13749,7 +14478,7 @@ def test_get_bi_reservation_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = reservation.BiReservation() + return_value = reservation.SplitCapacityCommitmentResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13761,39 +14490,40 @@ def test_get_bi_reservation_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) + return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_bi_reservation(request) + response = client.split_capacity_commitment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_bi_reservation_rest_unset_required_fields(): +def test_split_capacity_commitment_rest_unset_required_fields(): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_bi_reservation._get_unset_required_fields({}) + unset_fields = transport.split_capacity_commitment._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_bi_reservation_rest_flattened(): +def test_split_capacity_commitment_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13802,14 +14532,17 @@ def test_get_bi_reservation_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.BiReservation() + return_value = reservation.SplitCapacityCommitmentResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/biReservation"} + sample_request = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } # get truthy value for each flattened field mock_args = dict( name="name_value", + slot_count=1098, ) mock_args.update(sample_request) @@ -13817,26 +14550,26 @@ def test_get_bi_reservation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) + return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_bi_reservation(**mock_args) + client.split_capacity_commitment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/biReservation}" + "%s/v1/{name=projects/*/locations/*/capacityCommitments/*}:split" % client.transport._host, args[1], ) -def test_get_bi_reservation_rest_flattened_error(transport: str = "rest"): +def test_split_capacity_commitment_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13845,13 +14578,14 @@ def test_get_bi_reservation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_bi_reservation( - reservation.GetBiReservationRequest(), + client.split_capacity_commitment( + reservation.SplitCapacityCommitmentRequest(), name="name_value", + slot_count=1098, ) -def test_update_bi_reservation_rest_use_cached_wrapped_rpc(): +def test_merge_capacity_commitments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13866,7 +14600,7 @@ def test_update_bi_reservation_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.update_bi_reservation + client._transport.merge_capacity_commitments in client._transport._wrapped_methods ) @@ -13876,23 +14610,23 @@ def test_update_bi_reservation_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_bi_reservation + client._transport.merge_capacity_commitments ] = mock_rpc request = {} - client.update_bi_reservation(request) + client.merge_capacity_commitments(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_bi_reservation(request) + client.merge_capacity_commitments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_bi_reservation_rest_flattened(): +def test_merge_capacity_commitments_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13901,19 +14635,15 @@ def test_update_bi_reservation_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.BiReservation() + return_value = reservation.CapacityCommitment() # get arguments that satisfy an http rule for this method - sample_request = { - "bi_reservation": { - "name": "projects/sample1/locations/sample2/biReservation" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - bi_reservation=reservation.BiReservation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", + capacity_commitment_ids=["capacity_commitment_ids_value"], ) mock_args.update(sample_request) @@ -13921,26 +14651,26 @@ def test_update_bi_reservation_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_bi_reservation(**mock_args) + client.merge_capacity_commitments(**mock_args) # 
Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{bi_reservation.name=projects/*/locations/*/biReservation}" + "%s/v1/{parent=projects/*/locations/*}/capacityCommitments:merge" % client.transport._host, args[1], ) -def test_update_bi_reservation_rest_flattened_error(transport: str = "rest"): +def test_merge_capacity_commitments_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13949,1220 +14679,4417 @@ def test_update_bi_reservation_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_bi_reservation( - reservation.UpdateBiReservationRequest(), - bi_reservation=reservation.BiReservation(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.merge_capacity_commitments( + reservation.MergeCapacityCommitmentsRequest(), + parent="parent_value", + capacity_commitment_ids=["capacity_commitment_ids_value"], ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_create_assignment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + transport="rest", ) - # It is an error to provide an api_key and a transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options=options, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Ensure method has been cached + assert client._transport.create_assignment in client._transport._wrapped_methods - # It is an error to provide scopes and a transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ReservationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.create_assignment + ] = mock_rpc + request = {} + client.create_assignment(request) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ReservationServiceClient(transport=transport) - assert client.transport is transport + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.create_assignment(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ReservationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - transport = transports.ReservationServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel +def test_create_assignment_rest_required_fields( + request_type=reservation.CreateAssignmentRequest, +): + transport_class = transports.ReservationServiceRestTransport -@pytest.mark.parametrize( - "transport_class", - [ - transports.ReservationServiceGrpcTransport, - transports.ReservationServiceGrpcAsyncIOTransport, - transports.ReservationServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -def test_transport_kind_grpc(): - transport = ReservationServiceClient.get_transport_class("grpc")( + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).create_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_initialize_client_w_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_assignment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("assignment_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_reservation_empty_call_grpc(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_reservation), "__call__" - ) as call: - call.return_value = gcbr_reservation.Reservation() - client.create_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.CreateReservationRequest() - - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_reservations_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_reservations), "__call__" - ) as call: - call.return_value = reservation.ListReservationsResponse() - client.list_reservations(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListReservationsRequest() + response = client.create_assignment(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_create_assignment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: - call.return_value = reservation.Reservation() - client.get_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetReservationRequest() - - assert args[0] == request_msg + unset_fields = transport.create_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assignmentId",)) & set(("parent",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_delete_reservation_empty_call_grpc(): +def test_create_assignment_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), "__call__" - ) as call: - call.return_value = None - client.delete_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteReservationRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() - assert args[0] == request_msg + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/reservations/sample3" + } + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + assignment=reservation.Assignment(name="name_value"), + ) + mock_args.update(sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_reservation), "__call__" - ) as call: - call.return_value = gcbr_reservation.Reservation() - client.update_reservation(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.UpdateReservationRequest() + client.create_assignment(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_failover_reservation_empty_call_grpc(): +def test_create_assignment_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), "__call__" - ) as call: - call.return_value = reservation.Reservation() - client.failover_reservation(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_assignment( + reservation.CreateAssignmentRequest(), + parent="parent_value", + assignment=reservation.Assignment(name="name_value"), + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.FailoverReservationRequest() - assert args[0] == request_msg +def test_list_assignments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_capacity_commitment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.list_assignments in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), "__call__" - ) as call: - call.return_value = reservation.CapacityCommitment() - client.create_capacity_commitment(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_assignments + ] = mock_rpc - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateCapacityCommitmentRequest() + request = {} + client.list_assignments(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_assignments(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_capacity_commitments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_assignments_rest_required_fields( + request_type=reservation.ListAssignmentsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), "__call__" - ) as call: - call.return_value = reservation.ListCapacityCommitmentsResponse() - client.list_capacity_commitments(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListCapacityCommitmentsRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_capacity_commitment_empty_call_grpc(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_capacity_commitment), "__call__" - ) as call: - call.return_value = reservation.CapacityCommitment() - client.get_capacity_commitment(request=None) + # Designate an appropriate value for the returned response. + return_value = reservation.ListAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetCapacityCommitmentRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = reservation.ListAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_capacity_commitment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.list_assignments(request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), "__call__" - ) as call: - call.return_value = None - client.delete_capacity_commitment(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteCapacityCommitmentRequest() - assert args[0] == request_msg +def test_list_assignments_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.list_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_capacity_commitment_empty_call_grpc(): + +def test_list_assignments_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), "__call__" - ) as call: - call.return_value = reservation.CapacityCommitment() - client.update_capacity_commitment(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.ListAssignmentsResponse() - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateCapacityCommitmentRequest() + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/reservations/sample3" + } - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.ListAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_assignments(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_split_capacity_commitment_empty_call_grpc(): + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/reservations/*}/assignments" + % client.transport._host, + args[1], + ) + + +def test_list_assignments_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.split_capacity_commitment), "__call__" - ) as call: - call.return_value = reservation.SplitCapacityCommitmentResponse() - client.split_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SplitCapacityCommitmentRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_assignments( + reservation.ListAssignmentsRequest(), + parent="parent_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_merge_capacity_commitments_empty_call_grpc(): +def test_list_assignments_rest_pager(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.merge_capacity_commitments), "__call__" - ) as call: - call.return_value = reservation.CapacityCommitment() - client.merge_capacity_commitments(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.ListAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + reservation.Assignment(), + ], + next_page_token="abc", + ), + reservation.ListAssignmentsResponse( + assignments=[], + next_page_token="def", + ), + reservation.ListAssignmentsResponse( + assignments=[ + reservation.Assignment(), + ], + next_page_token="ghi", + ), + reservation.ListAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + ], + ), + ) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MergeCapacityCommitmentsRequest() + # Wrap the values into proper Response objs + response = tuple( + reservation.ListAssignmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = { + "parent": "projects/sample1/locations/sample2/reservations/sample3" + } + pager = client.list_assignments(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_assignment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Assignment) for i in results) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_assignment), "__call__" - ) as call: - call.return_value = reservation.Assignment() - client.create_assignment(request=None) + pages = list(client.list_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateAssignmentRequest() - assert args[0] == request_msg +def test_delete_assignment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_assignments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.delete_assignment in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_assignments), "__call__") as call: - call.return_value = reservation.ListAssignmentsResponse() - client.list_assignments(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_assignment + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListAssignmentsRequest() + request = {} + client.delete_assignment(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + client.delete_assignment(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_assignment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), "__call__" - ) as call: - call.return_value = None - client.delete_assignment(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteAssignmentRequest() +def test_delete_assignment_rest_required_fields( + request_type=reservation.DeleteAssignmentRequest, +): + transport_class = transports.ReservationServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_assignments_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.search_assignments), "__call__" - ) as call: - call.return_value = reservation.SearchAssignmentsResponse() - client.search_assignments(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAssignmentsRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_all_assignments_empty_call_grpc(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), "__call__" - ) as call: - call.return_value = reservation.SearchAllAssignmentsResponse() - client.search_all_assignments(request=None) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAllAssignmentsRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_assignment(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_move_assignment_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.move_assignment), "__call__") as call: - call.return_value = reservation.Assignment() - client.move_assignment(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MoveAssignmentRequest() +def test_delete_assignment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.delete_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_assignment_empty_call_grpc(): +def test_delete_assignment_rest_flattened(): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_assignment), "__call__" - ) as call: - call.return_value = reservation.Assignment() - client.update_assignment(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateAssignmentRequest() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_assignment(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_bi_reservation_empty_call_grpc(): + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_assignment_rest_flattened_error(transport: str = "rest"): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bi_reservation), "__call__" - ) as call: - call.return_value = reservation.BiReservation() - client.get_bi_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetBiReservationRequest() + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_assignment( + reservation.DeleteAssignmentRequest(), + name="name_value", + ) - assert args[0] == request_msg +def test_search_assignments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_bi_reservation_empty_call_grpc(): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_bi_reservation), "__call__" - ) as call: - call.return_value = reservation.BiReservation() - client.update_bi_reservation(request=None) + # Ensure method has been cached + assert ( + client._transport.search_assignments in client._transport._wrapped_methods + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateBiReservationRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.search_assignments + ] = mock_rpc - assert args[0] == request_msg + request = {} + client.search_assignments(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -def test_transport_kind_grpc_asyncio(): - transport = ReservationServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + client.search_assignments(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_initialize_client_w_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None +def test_search_assignments_rest_required_fields( + request_type=reservation.SearchAssignmentsRequest, +): + transport_class = transports.ReservationServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_reservation), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcbr_reservation.Reservation( - name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, - ) - ) - await client.create_reservation(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.CreateReservationRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_reservations_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_reservations), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.ListReservationsResponse( - next_page_token="next_page_token_value", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "query", ) - await client.list_reservations(request=None) + ) + jsonified_request.update(unset_fields) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListReservationsRequest() + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - assert args[0] == request_msg + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + response_value = Response() + response_value.status_code = 200 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.Reservation( - name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, - ) - ) - await client.get_reservation(request=None) + # Convert return value to protobuf type + return_value = reservation.SearchAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetReservationRequest() + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + response = client.search_assignments(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_search_assignments_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_reservation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_reservation(request=None) + unset_fields = transport.search_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "query", + ) + ) + & set(("parent",)) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteReservationRequest() - assert args[0] == request_msg +def test_search_assignments_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAssignmentsResponse() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_reservation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gcbr_reservation.Reservation( - name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + query="query_value", ) - await client.update_reservation(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = gcbr_reservation.UpdateReservationRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.SearchAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.search_assignments(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}:searchAssignments" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_failover_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_search_assignments_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.failover_reservation), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.Reservation( - name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_assignments( + reservation.SearchAssignmentsRequest(), + parent="parent_value", + query="query_value", ) - await client.failover_reservation(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.FailoverReservationRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_search_assignments_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_capacity_commitment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.CapacityCommitment( - name="name_value", - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.SearchAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + reservation.Assignment(), + ], + next_page_token="abc", + ), + reservation.SearchAssignmentsResponse( + assignments=[], + next_page_token="def", + ), + reservation.SearchAssignmentsResponse( + assignments=[ + reservation.Assignment(), + ], + next_page_token="ghi", + ), + reservation.SearchAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + ], + ), ) - await client.create_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateCapacityCommitmentRequest() + # Two responses for two calls + response = response + response - assert args[0] == request_msg + # Wrap the values into proper Response objs + response = tuple( + reservation.SearchAssignmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_capacity_commitments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + pager = client.search_assignments(request=sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_capacity_commitments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.ListCapacityCommitmentsResponse( - next_page_token="next_page_token_value", - ) - ) - await client.list_capacity_commitments(request=None) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Assignment) for i in results) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListCapacityCommitmentsRequest() + pages = list(client.search_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - assert args[0] == request_msg +def test_search_all_assignments_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_capacity_commitment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.CapacityCommitment( - name="name_value", - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) + # Ensure method has been cached + assert ( + client._transport.search_all_assignments + in client._transport._wrapped_methods ) - await client.get_capacity_commitment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetCapacityCommitmentRequest() - assert args[0] == request_msg + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.search_all_assignments + ] = mock_rpc + request = {} + client.search_all_assignments(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_capacity_commitment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_capacity_commitment(request=None) + client.search_all_assignments(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteCapacityCommitmentRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_search_all_assignments_rest_required_fields( + request_type=reservation.SearchAllAssignmentsRequest, +): + transport_class = transports.ReservationServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_capacity_commitment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.CapacityCommitment( - name="name_value", - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - ) - await client.update_capacity_commitment(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateCapacityCommitmentRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_assignments._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_split_capacity_commitment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_all_assignments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "query", + ) ) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.split_capacity_commitment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.SplitCapacityCommitmentResponse() - ) - await client.split_capacity_commitment(request=None) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SplitCapacityCommitmentRequest() - - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_merge_capacity_commitments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.merge_capacity_commitments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.CapacityCommitment( - name="name_value", - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, - ) - ) - await client.merge_capacity_commitments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MergeCapacityCommitmentsRequest() - - assert args[0] == request_msg + # Designate an appropriate value for the returned response. 
+ return_value = reservation.SearchAllAssignmentsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_assignment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.Assignment( - name="name_value", - assignee="assignee_value", - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - ) - ) - await client.create_assignment(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.CreateAssignmentRequest() + response = client.search_all_assignments(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_assignments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_search_all_assignments_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_assignments), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.ListAssignmentsResponse( - next_page_token="next_page_token_value", + unset_fields = transport.search_all_assignments._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "query", ) ) - await client.list_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.ListAssignmentsRequest() - - assert args[0] == request_msg + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_search_all_assignments_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_assignment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_assignment(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.SearchAllAssignmentsResponse() - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.DeleteAssignmentRequest() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + query="query_value", + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_assignments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + client.search_all_assignments(**mock_args) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_assignments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.SearchAssignmentsResponse( - next_page_token="next_page_token_value", - ) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}:searchAllAssignments" + % client.transport._host, + args[1], ) - await client.search_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAssignmentsRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_search_all_assignments_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_search_all_assignments_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.search_all_assignments), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.SearchAllAssignmentsResponse( - next_page_token="next_page_token_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_assignments( + reservation.SearchAllAssignmentsRequest(), + parent="parent_value", + query="query_value", ) - await client.search_all_assignments(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.SearchAllAssignmentsRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_move_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_search_all_assignments_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.SearchAllAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + reservation.Assignment(), + ], + next_page_token="abc", + ), + reservation.SearchAllAssignmentsResponse( + assignments=[], + next_page_token="def", + ), + reservation.SearchAllAssignmentsResponse( + assignments=[ + reservation.Assignment(), + ], + next_page_token="ghi", + ), + reservation.SearchAllAssignmentsResponse( + assignments=[ + reservation.Assignment(), + reservation.Assignment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + reservation.SearchAllAssignmentsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.search_all_assignments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.Assignment) for i in results) + + pages = 
list(client.search_all_assignments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_move_assignment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.move_assignment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.move_assignment] = mock_rpc + + request = {} + client.move_assignment(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.move_assignment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_move_assignment_rest_required_fields( + request_type=reservation.MoveAssignmentRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).move_assignment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.Assignment() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.move_assignment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_move_assignment_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.move_assignment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_move_assignment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.Assignment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + destination_id="destination_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.move_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservations/*/assignments/*}:move" + % client.transport._host, + args[1], + ) + + +def test_move_assignment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.move_assignment( + reservation.MoveAssignmentRequest(), + name="name_value", + destination_id="destination_id_value", + ) + + +def test_update_assignment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_assignment in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_assignment + ] = mock_rpc + + request = {} + client.update_assignment(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_assignment(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_assignment_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.Assignment() + + # get arguments that satisfy an http rule for this method + sample_request = { + "assignment": { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + assignment=reservation.Assignment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.Assignment.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_assignment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{assignment.name=projects/*/locations/*/reservations/*/assignments/*}" + % client.transport._host, + args[1], + ) + + +def test_update_assignment_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_assignment( + reservation.UpdateAssignmentRequest(), + assignment=reservation.Assignment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_get_bi_reservation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_bi_reservation in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_bi_reservation + ] = mock_rpc + + request = {} + client.get_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_bi_reservation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_bi_reservation_rest_required_fields( + request_type=reservation.GetBiReservationRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_bi_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_bi_reservation._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.BiReservation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_bi_reservation(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_bi_reservation_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_bi_reservation._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_bi_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.BiReservation() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/biReservation"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_bi_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/biReservation}" + % client.transport._host, + args[1], + ) + + +def test_get_bi_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_bi_reservation( + reservation.GetBiReservationRequest(), + name="name_value", + ) + + +def test_update_bi_reservation_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_bi_reservation + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_bi_reservation + ] = mock_rpc + + request = {} + client.update_bi_reservation(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_bi_reservation(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_bi_reservation_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.BiReservation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "bi_reservation": { + "name": "projects/sample1/locations/sample2/biReservation" + } + } + + # get truthy value for each flattened field + mock_args = dict( + bi_reservation=reservation.BiReservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_bi_reservation(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{bi_reservation.name=projects/*/locations/*/biReservation}" + % client.transport._host, + args[1], + ) + + +def test_update_bi_reservation_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_bi_reservation( + reservation.UpdateBiReservationRequest(), + bi_reservation=reservation.BiReservation(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("options",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("options",)) & set(("resource",))) + + +def test_get_iam_policy_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/reservations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{resource=projects/*/locations/*/reservations/*}:getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + iam_policy_pb2.GetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "policy", + ) + ) + ) + + +def test_set_iam_policy_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = policy_pb2.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "resource": "projects/sample1/locations/sample2/reservations/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{resource=projects/*/locations/*/reservations/*}:setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + iam_policy_pb2.SetIamPolicyRequest(), + resource="resource_value", + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.test_iam_permissions + ] = mock_rpc + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["resource"] = "" + request_init["permissions"] = "" + request = request_type(**request_init) + pb_request = request + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resource"] = "resource_value" + jsonified_request["permissions"] = "permissions_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "permissions" in jsonified_request + assert jsonified_request["permissions"] == "permissions_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "resource", + "permissions", + ) + ) + ) + + +def test_create_reservation_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure 
method has been cached + assert ( + client._transport.create_reservation_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_reservation_group + ] = mock_rpc + + request = {} + client.create_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_reservation_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_reservation_group_rest_required_fields( + request_type=reservation.CreateReservationGroupRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["reservation_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "reservationGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_reservation_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "reservationGroupId" in jsonified_request + assert ( + jsonified_request["reservationGroupId"] == request_init["reservation_group_id"] + ) + + jsonified_request["parent"] = "parent_value" + jsonified_request["reservationGroupId"] = "reservation_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_reservation_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("reservation_group_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "reservationGroupId" in jsonified_request + assert jsonified_request["reservationGroupId"] == "reservation_group_id_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.ReservationGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = reservation.ReservationGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_reservation_group(request) + + expected_params = [ + ( + "reservationGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_reservation_group_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_reservation_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("reservationGroupId",)) + & set( + ( + "parent", + "reservationGroupId", + "reservationGroup", + ) + ) + ) + + +def test_get_reservation_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_reservation_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = 
mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_reservation_group + ] = mock_rpc + + request = {} + client.get_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_reservation_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_reservation_group_rest_required_fields( + request_type=reservation.GetReservationGroupRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_reservation_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_reservation_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = reservation.ReservationGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = reservation.ReservationGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_reservation_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_reservation_group_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_reservation_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_reservation_group_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.ReservationGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservationGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.ReservationGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_reservation_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservationGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_reservation_group_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_reservation_group( + reservation.GetReservationGroupRequest(), + name="name_value", + ) + + +def test_delete_reservation_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_reservation_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_reservation_group + ] = mock_rpc + + request = {} + client.delete_reservation_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_reservation_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_reservation_group_rest_required_fields( + request_type=reservation.DeleteReservationGroupRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_reservation_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_reservation_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_reservation_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_reservation_group_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_reservation_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_reservation_group_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/reservationGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_reservation_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/reservationGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_reservation_group_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_reservation_group( + reservation.DeleteReservationGroupRequest(), + name="name_value", + ) + + +def test_list_reservation_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_reservation_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_reservation_groups + ] = mock_rpc + + request = {} + client.list_reservation_groups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_reservation_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_reservation_groups_rest_required_fields( + request_type=reservation.ListReservationGroupsRequest, +): + transport_class = transports.ReservationServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reservation_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_reservation_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = reservation.ListReservationGroupsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = reservation.ListReservationGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_reservation_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_reservation_groups_rest_unset_required_fields(): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_reservation_groups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_reservation_groups_rest_flattened(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.ListReservationGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = reservation.ListReservationGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_reservation_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/reservationGroups" + % client.transport._host, + args[1], + ) + + +def test_list_reservation_groups_rest_flattened_error(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_reservation_groups( + reservation.ListReservationGroupsRequest(), + parent="parent_value", + ) + + +def test_list_reservation_groups_rest_pager(transport: str = "rest"): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + next_page_token="abc", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[], + next_page_token="def", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + ], + next_page_token="ghi", + ), + reservation.ListReservationGroupsResponse( + reservation_groups=[ + reservation.ReservationGroup(), + reservation.ReservationGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + reservation.ListReservationGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_reservation_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, reservation.ReservationGroup) for i in results) + + pages = list(client.list_reservation_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ReservationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ReservationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ReservationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ReservationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ReservationServiceGrpcTransport, + transports.ReservationServiceGrpcAsyncIOTransport, + transports.ReservationServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ReservationServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_reservation_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + call.return_value = gcbr_reservation.Reservation() + client.create_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcbr_reservation.CreateReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_reservations_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + call.return_value = reservation.ListReservationsResponse() + client.list_reservations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListReservationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_reservation_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + call.return_value = reservation.Reservation() + client.get_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_reservation_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + call.return_value = None + client.delete_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_reservation_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + call.return_value = gcbr_reservation.Reservation() + client.update_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcbr_reservation.UpdateReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_failover_reservation_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.failover_reservation), "__call__" + ) as call: + call.return_value = reservation.Reservation() + client.failover_reservation(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.FailoverReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_capacity_commitment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.create_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.CreateCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_capacity_commitments_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + call.return_value = reservation.ListCapacityCommitmentsResponse() + client.list_capacity_commitments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListCapacityCommitmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_capacity_commitment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.get_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_capacity_commitment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_capacity_commitment), "__call__" + ) as call: + call.return_value = None + client.delete_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_capacity_commitment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.update_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.UpdateCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_split_capacity_commitment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.split_capacity_commitment), "__call__" + ) as call: + call.return_value = reservation.SplitCapacityCommitmentResponse() + client.split_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.SplitCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_merge_capacity_commitments_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.merge_capacity_commitments), "__call__" + ) as call: + call.return_value = reservation.CapacityCommitment() + client.merge_capacity_commitments(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.MergeCapacityCommitmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_assignment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + call.return_value = reservation.Assignment() + client.create_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.CreateAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assignments_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_assignments), "__call__") as call: + call.return_value = reservation.ListAssignmentsResponse() + client.list_assignments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListAssignmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_assignment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_assignment), "__call__" + ) as call: + call.return_value = None + client.delete_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_assignments_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_assignments), "__call__" + ) as call: + call.return_value = reservation.SearchAssignmentsResponse() + client.search_assignments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.SearchAssignmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_assignments_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_assignments), "__call__" + ) as call: + call.return_value = reservation.SearchAllAssignmentsResponse() + client.search_all_assignments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.SearchAllAssignmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_move_assignment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.move_assignment), "__call__") as call: + call.return_value = reservation.Assignment() + client.move_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.MoveAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_assignment_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: + call.return_value = reservation.Assignment() + client.update_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.UpdateAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_bi_reservation_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_bi_reservation), "__call__" + ) as call: + call.return_value = reservation.BiReservation() + client.get_bi_reservation(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetBiReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bi_reservation_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + call.return_value = reservation.BiReservation() + client.update_bi_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.UpdateBiReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_reservation_group_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + call.return_value = reservation.ReservationGroup() + client.create_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.CreateReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_reservation_group_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + call.return_value = reservation.ReservationGroup() + client.get_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_reservation_group_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + call.return_value = None + client.delete_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_reservation_groups_empty_call_grpc(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + call.return_value = reservation.ListReservationGroupsResponse() + client.list_reservation_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListReservationGroupsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ReservationServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_reservation_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", + ) + ) + await client.create_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcbr_reservation.CreateReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_reservations_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_reservations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_reservations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListReservationsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_reservation_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_reservation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", + ) + ) + await client.get_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_reservation_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_reservation(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_reservation_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", + ) + ) + await client.update_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcbr_reservation.UpdateReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_failover_reservation_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.failover_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", + ) + ) + await client.failover_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.FailoverReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_capacity_commitment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + is_flat_rate=True, + ) + ) + await client.create_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.CreateCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_capacity_commitments_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_capacity_commitments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListCapacityCommitmentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_capacity_commitments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListCapacityCommitmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_capacity_commitment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + is_flat_rate=True, + ) + ) + await client.get_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_capacity_commitment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_capacity_commitment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + is_flat_rate=True, + ) + ) + await client.update_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.UpdateCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_split_capacity_commitment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.split_capacity_commitment), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.SplitCapacityCommitmentResponse() + ) + await client.split_capacity_commitment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.SplitCapacityCommitmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_merge_capacity_commitments_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.merge_capacity_commitments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.CapacityCommitment( + name="name_value", + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + is_flat_rate=True, + ) + ) + await client.merge_capacity_commitments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.MergeCapacityCommitmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_assignment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + enable_gemini_in_bigquery=True, + ) + ) + await client.create_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.CreateAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_assignments_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_assignments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_assignments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListAssignmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_assignment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_assignment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_assignments_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.SearchAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.search_assignments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.SearchAssignmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_search_all_assignments_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_assignments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.SearchAllAssignmentsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.search_all_assignments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.SearchAllAssignmentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_move_assignment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.move_assignment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + enable_gemini_in_bigquery=True, + ) + ) + await client.move_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.MoveAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_assignment_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.move_assignment), "__call__") as call: + with mock.patch.object( + type(client.transport.update_assignment), "__call__" + ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( reservation.Assignment( @@ -15173,118 +19100,1342 @@ async def test_move_assignment_empty_call_grpc_asyncio(): enable_gemini_in_bigquery=True, ) ) - await client.move_assignment(request=None) + await client.update_assignment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.UpdateAssignmentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_bi_reservation_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_bi_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.BiReservation( + name="name_value", + size=443, + ) + ) + await client.get_bi_reservation(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetBiReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_bi_reservation_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_bi_reservation), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.BiReservation( + name="name_value", + size=443, + ) + ) + await client.update_bi_reservation(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.UpdateBiReservationRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_iam_policy_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + await client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_iam_policy_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + await client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_test_iam_permissions_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + await client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_reservation_group_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ReservationGroup( + name="name_value", + ) + ) + await client.create_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.CreateReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_reservation_group_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ReservationGroup( + name="name_value", + ) + ) + await client.get_reservation_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_reservation_group_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_reservation_groups_empty_call_grpc_asyncio(): + client = ReservationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + reservation.ListReservationGroupsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_reservation_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListReservationGroupsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ReservationServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_reservation_rest_bad_request( + request_type=gcbr_reservation.CreateReservationRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_reservation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcbr_reservation.CreateReservationRequest, + dict, + ], +) +def test_create_reservation_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["reservation"] = { + "name": "name_value", + "slot_capacity": 1391, + "ignore_idle_slots": True, + "autoscale": {"current_slots": 1431, "max_slots": 986}, + "concurrency": 1195, + "creation_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "multi_region_auxiliary": True, + "edition": 
1, + "primary_location": "primary_location_value", + "secondary_location": "secondary_location_value", + "original_primary_location": "original_primary_location_value", + "max_slots": 986, + "scaling_mode": 1, + "labels": {}, + "reservation_group": "reservation_group_value", + "replication_status": { + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "last_error_time": {}, + "last_replication_time": {}, + "soft_failover_start_time": {}, + }, + "scheduling_policy": {"concurrency": 1195, "max_slots": 986}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcbr_reservation.CreateReservationRequest.meta.fields["reservation"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["reservation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["reservation"][field])): + del request_init["reservation"][field][i][subfield] + else: + del 
request_init["reservation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcbr_reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_reservation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + assert response.primary_location == "primary_location_value" + assert response.secondary_location == "secondary_location_value" + assert response.original_primary_location == "original_primary_location_value" + assert response.max_slots == 986 + assert ( + response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + ) + assert response.reservation_group == "reservation_group_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_create_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_create_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_create_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcbr_reservation.CreateReservationRequest.pb( + gcbr_reservation.CreateReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcbr_reservation.Reservation.to_json( + gcbr_reservation.Reservation() + ) + req.return_value.content = return_value + + request = gcbr_reservation.CreateReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcbr_reservation.Reservation() + post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata + + client.create_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_reservations_rest_bad_request( + request_type=reservation.ListReservationsRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_reservations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.ListReservationsRequest, + dict, + ], +) +def test_list_reservations_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.ListReservationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = reservation.ListReservationsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_reservations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListReservationsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_reservations_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_list_reservations" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_list_reservations_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_list_reservations" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = reservation.ListReservationsRequest.pb( + reservation.ListReservationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = reservation.ListReservationsResponse.to_json( + reservation.ListReservationsResponse() + ) + req.return_value.content = return_value + + request = reservation.ListReservationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.ListReservationsResponse() + post_with_metadata.return_value = ( + reservation.ListReservationsResponse(), + metadata, + ) + + client.list_reservations( + request, + metadata=[ + 
("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_reservation_rest_bad_request( + request_type=reservation.GetReservationRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_reservation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.GetReservationRequest, + dict, + ], +) +def test_get_reservation_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_reservation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + assert response.primary_location == "primary_location_value" + assert response.secondary_location == "secondary_location_value" + assert response.original_primary_location == "original_primary_location_value" + assert response.max_slots == 986 + assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert response.reservation_group == "reservation_group_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_get_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_get_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_get_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = reservation.GetReservationRequest.pb( + reservation.GetReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = reservation.Reservation.to_json(reservation.Reservation()) + req.return_value.content = return_value + + request = reservation.GetReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.Reservation() + post_with_metadata.return_value = reservation.Reservation(), metadata + + client.get_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_reservation_rest_bad_request( + request_type=reservation.DeleteReservationRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_reservation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.DeleteReservationRequest, + dict, + ], +) +def test_delete_reservation_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_reservation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_delete_reservation" + ) as pre: + pre.assert_not_called() + pb_message = reservation.DeleteReservationRequest.pb( + reservation.DeleteReservationRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = reservation.DeleteReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_update_reservation_rest_bad_request( + request_type=gcbr_reservation.UpdateReservationRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "reservation": { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_reservation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcbr_reservation.UpdateReservationRequest, + dict, + ], +) +def test_update_reservation_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "reservation": { + "name": "projects/sample1/locations/sample2/reservations/sample3" + } + } + request_init["reservation"] = { + "name": "projects/sample1/locations/sample2/reservations/sample3", + "slot_capacity": 1391, + "ignore_idle_slots": True, + "autoscale": {"current_slots": 1431, "max_slots": 986}, + "concurrency": 1195, + "creation_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "multi_region_auxiliary": True, + "edition": 1, + "primary_location": "primary_location_value", + "secondary_location": "secondary_location_value", + "original_primary_location": "original_primary_location_value", + "max_slots": 986, + "scaling_mode": 1, + "labels": {}, + "reservation_group": "reservation_group_value", + "replication_status": { + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "last_error_time": {}, + "last_replication_time": {}, + "soft_failover_start_time": {}, + }, + "scheduling_policy": {"concurrency": 1195, "max_slots": 986}, + } + # The version of a generated dependency at test runtime 
may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcbr_reservation.UpdateReservationRequest.meta.fields["reservation"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["reservation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request 
which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["reservation"][field])): + del request_init["reservation"][field][i][subfield] + else: + del request_init["reservation"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcbr_reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=gcbr_reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.MoveAssignmentRequest() + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = gcbr_reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_reservation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gcbr_reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == gcbr_reservation.Edition.STANDARD + assert response.primary_location == "primary_location_value" + assert response.secondary_location == "secondary_location_value" + assert response.original_primary_location == "original_primary_location_value" + assert response.max_slots == 986 + assert ( + response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + ) + assert response.reservation_group == "reservation_group_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_assignment_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), ) + client = ReservationServiceClient(transport=transport) - # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_assignment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.Assignment( - name="name_value", - assignee="assignee_value", - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, - ) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_update_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_update_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_update_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcbr_reservation.UpdateReservationRequest.pb( + gcbr_reservation.UpdateReservationRequest() ) - await client.update_assignment(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateAssignmentRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcbr_reservation.Reservation.to_json( + gcbr_reservation.Reservation() + ) + req.return_value.content = return_value - assert args[0] == request_msg + request = gcbr_reservation.UpdateReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcbr_reservation.Reservation() + post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata + + client.update_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_bi_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_failover_reservation_rest_bad_request( + request_type=reservation.FailoverReservationRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_bi_reservation), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.BiReservation( - name="name_value", - size=443, - ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.failover_reservation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + reservation.FailoverReservationRequest, + dict, + ], +) +def test_failover_reservation_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = reservation.Reservation( + name="name_value", + slot_capacity=1391, + ignore_idle_slots=True, + concurrency=1195, + multi_region_auxiliary=True, + edition=reservation.Edition.STANDARD, + primary_location="primary_location_value", + secondary_location="secondary_location_value", + original_primary_location="original_primary_location_value", + max_slots=986, + scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + reservation_group="reservation_group_value", ) - await client.get_bi_reservation(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.GetBiReservationRequest() + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = reservation.Reservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.failover_reservation(request) + # Establish that the response is the type that we expect. + assert isinstance(response, reservation.Reservation) + assert response.name == "name_value" + assert response.slot_capacity == 1391 + assert response.ignore_idle_slots is True + assert response.concurrency == 1195 + assert response.multi_region_auxiliary is True + assert response.edition == reservation.Edition.STANDARD + assert response.primary_location == "primary_location_value" + assert response.secondary_location == "secondary_location_value" + assert response.original_primary_location == "original_primary_location_value" + assert response.max_slots == 986 + assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert response.reservation_group == "reservation_group_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_bi_reservation_empty_call_grpc_asyncio(): - client = ReservationServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_failover_reservation_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), ) + client = ReservationServiceClient(transport=transport) - # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_bi_reservation), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - reservation.BiReservation( - name="name_value", - size=443, - ) + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "post_failover_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_failover_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_failover_reservation" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = reservation.FailoverReservationRequest.pb( + reservation.FailoverReservationRequest() ) - await client.update_bi_reservation(request=None) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = reservation.UpdateBiReservationRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = reservation.Reservation.to_json(reservation.Reservation()) + req.return_value.content = return_value - assert args[0] == request_msg + request = reservation.FailoverReservationRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = reservation.Reservation() + post_with_metadata.return_value = reservation.Reservation(), metadata + client.failover_reservation( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = ReservationServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_create_reservation_rest_bad_request( - request_type=gcbr_reservation.CreateReservationRequest, +def test_create_capacity_commitment_rest_bad_request( + request_type=reservation.CreateCapacityCommitmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -15305,60 +20456,53 @@ def test_create_reservation_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_reservation(request) + client.create_capacity_commitment(request) @pytest.mark.parametrize( "request_type", [ - gcbr_reservation.CreateReservationRequest, + reservation.CreateCapacityCommitmentRequest, dict, ], ) -def test_create_reservation_rest_call_success(request_type): +def test_create_capacity_commitment_rest_call_success(request_type): client = 
ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["reservation"] = { + request_init["capacity_commitment"] = { "name": "name_value", - "slot_capacity": 1391, - "ignore_idle_slots": True, - "autoscale": {"current_slots": 1431, "max_slots": 986}, - "concurrency": 1195, - "creation_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, + "slot_count": 1098, + "plan": 3, + "state": 1, + "commitment_start_time": {"seconds": 751, "nanos": 543}, + "commitment_end_time": {}, + "failure_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "renewal_plan": 3, "multi_region_auxiliary": True, "edition": 1, - "primary_location": "primary_location_value", - "secondary_location": "secondary_location_value", - "original_primary_location": "original_primary_location_value", - "max_slots": 986, - "scaling_mode": 1, - "replication_status": { - "error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "last_error_time": {}, - "last_replication_time": {}, - "soft_failover_start_time": {}, - }, + "is_flat_rate": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcbr_reservation.CreateReservationRequest.meta.fields["reservation"] + test_field = reservation.CreateCapacityCommitmentRequest.meta.fields[ + "capacity_commitment" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -15386,7 +20530,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["reservation"].items(): # pragma: NO COVER + for field, value in request_init["capacity_commitment"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -15416,27 +20560,24 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["reservation"][field])): - del request_init["reservation"][field][i][subfield] + for i in range(0, len(request_init["capacity_commitment"][field])): + del request_init["capacity_commitment"][field][i][subfield] else: - del request_init["reservation"][field][subfield] + del request_init["capacity_commitment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = gcbr_reservation.Reservation( + return_value = reservation.CapacityCommitment( name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + edition=reservation.Edition.STANDARD, + is_flat_rate=True, ) # Wrap the value into a proper Response obj @@ -15444,32 +20585,27 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_reservation(request) + response = client.create_capacity_commitment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcbr_reservation.Reservation) + assert isinstance(response, reservation.CapacityCommitment) assert response.name == "name_value" - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == "primary_location_value" - assert response.secondary_location == "secondary_location_value" - assert response.original_primary_location == "original_primary_location_value" - assert response.max_slots == 986 - assert ( - response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY - ) + assert response.edition == reservation.Edition.STANDARD + assert response.is_flat_rate is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_reservation_rest_interceptors(null_interceptor): +def test_create_capacity_commitment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15483,18 +20619,18 @@ def test_create_reservation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_create_reservation" + transports.ReservationServiceRestInterceptor, "post_create_capacity_commitment" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_create_reservation_with_metadata", + "post_create_capacity_commitment_with_metadata", ) as post_with_metadata, mock.patch.object( - 
transports.ReservationServiceRestInterceptor, "pre_create_reservation" + transports.ReservationServiceRestInterceptor, "pre_create_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcbr_reservation.CreateReservationRequest.pb( - gcbr_reservation.CreateReservationRequest() + pb_message = reservation.CreateCapacityCommitmentRequest.pb( + reservation.CreateCapacityCommitmentRequest() ) transcode.return_value = { "method": "post", @@ -15506,21 +20642,21 @@ def test_create_reservation_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcbr_reservation.Reservation.to_json( - gcbr_reservation.Reservation() + return_value = reservation.CapacityCommitment.to_json( + reservation.CapacityCommitment() ) req.return_value.content = return_value - request = gcbr_reservation.CreateReservationRequest() + request = reservation.CreateCapacityCommitmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcbr_reservation.Reservation() - post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata + post.return_value = reservation.CapacityCommitment() + post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - client.create_reservation( + client.create_capacity_commitment( request, metadata=[ ("key", "val"), @@ -15533,8 +20669,8 @@ def test_create_reservation_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_reservations_rest_bad_request( - request_type=reservation.ListReservationsRequest, +def test_list_capacity_commitments_rest_bad_request( + request_type=reservation.ListCapacityCommitmentsRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -15555,17 
+20691,17 @@ def test_list_reservations_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_reservations(request) + client.list_capacity_commitments(request) @pytest.mark.parametrize( "request_type", [ - reservation.ListReservationsRequest, + reservation.ListCapacityCommitmentsRequest, dict, ], ) -def test_list_reservations_rest_call_success(request_type): +def test_list_capacity_commitments_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -15577,7 +20713,7 @@ def test_list_reservations_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.ListReservationsResponse( + return_value = reservation.ListCapacityCommitmentsResponse( next_page_token="next_page_token_value", ) @@ -15586,20 +20722,20 @@ def test_list_reservations_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.ListReservationsResponse.pb(return_value) + return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_reservations(request) + response = client.list_capacity_commitments(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReservationsPager) + assert isinstance(response, pagers.ListCapacityCommitmentsPager) assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_reservations_rest_interceptors(null_interceptor): +def test_list_capacity_commitments_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15613,18 +20749,18 @@ def test_list_reservations_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_list_reservations" + transports.ReservationServiceRestInterceptor, "post_list_capacity_commitments" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_list_reservations_with_metadata", + "post_list_capacity_commitments_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_list_reservations" + transports.ReservationServiceRestInterceptor, "pre_list_capacity_commitments" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.ListReservationsRequest.pb( - reservation.ListReservationsRequest() + pb_message = reservation.ListCapacityCommitmentsRequest.pb( + reservation.ListCapacityCommitmentsRequest() ) transcode.return_value = { "method": "post", @@ -15636,24 +20772,24 @@ def test_list_reservations_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.ListReservationsResponse.to_json( - reservation.ListReservationsResponse() + return_value = reservation.ListCapacityCommitmentsResponse.to_json( + 
reservation.ListCapacityCommitmentsResponse() ) req.return_value.content = return_value - request = reservation.ListReservationsRequest() + request = reservation.ListCapacityCommitmentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.ListReservationsResponse() + post.return_value = reservation.ListCapacityCommitmentsResponse() post_with_metadata.return_value = ( - reservation.ListReservationsResponse(), + reservation.ListCapacityCommitmentsResponse(), metadata, ) - client.list_reservations( + client.list_capacity_commitments( request, metadata=[ ("key", "val"), @@ -15666,14 +20802,16 @@ def test_list_reservations_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_reservation_rest_bad_request( - request_type=reservation.GetReservationRequest, +def test_get_capacity_commitment_rest_bad_request( + request_type=reservation.GetCapacityCommitmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -15688,40 +20826,39 @@ def test_get_reservation_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_reservation(request) + client.get_capacity_commitment(request) @pytest.mark.parametrize( "request_type", [ - reservation.GetReservationRequest, + reservation.GetCapacityCommitmentRequest, dict, ], ) -def test_get_reservation_rest_call_success(request_type): +def test_get_capacity_commitment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.Reservation( + return_value = reservation.CapacityCommitment( name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, multi_region_auxiliary=True, edition=reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + is_flat_rate=True, ) # Wrap the value into a proper Response obj @@ -15729,30 +20866,27 @@ def test_get_reservation_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_reservation(request) + response = client.get_capacity_commitment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.Reservation) + assert isinstance(response, reservation.CapacityCommitment) assert response.name == "name_value" - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX assert response.multi_region_auxiliary is True assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == "primary_location_value" - assert response.secondary_location == "secondary_location_value" - assert response.original_primary_location == "original_primary_location_value" - assert response.max_slots == 986 - assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert response.is_flat_rate is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_reservation_rest_interceptors(null_interceptor): +def test_get_capacity_commitment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15766,18 +20900,18 @@ def test_get_reservation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_get_reservation" + transports.ReservationServiceRestInterceptor, "post_get_capacity_commitment" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_get_reservation_with_metadata", + "post_get_capacity_commitment_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_get_reservation" + transports.ReservationServiceRestInterceptor, 
"pre_get_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.GetReservationRequest.pb( - reservation.GetReservationRequest() + pb_message = reservation.GetCapacityCommitmentRequest.pb( + reservation.GetCapacityCommitmentRequest() ) transcode.return_value = { "method": "post", @@ -15789,19 +20923,21 @@ def test_get_reservation_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Reservation.to_json(reservation.Reservation()) + return_value = reservation.CapacityCommitment.to_json( + reservation.CapacityCommitment() + ) req.return_value.content = return_value - request = reservation.GetReservationRequest() + request = reservation.GetCapacityCommitmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.Reservation() - post_with_metadata.return_value = reservation.Reservation(), metadata + post.return_value = reservation.CapacityCommitment() + post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - client.get_reservation( + client.get_capacity_commitment( request, metadata=[ ("key", "val"), @@ -15814,14 +20950,16 @@ def test_get_reservation_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_reservation_rest_bad_request( - request_type=reservation.DeleteReservationRequest, +def test_delete_capacity_commitment_rest_bad_request( + request_type=reservation.DeleteCapacityCommitmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "name": 
"projects/sample1/locations/sample2/capacityCommitments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15836,23 +20974,25 @@ def test_delete_reservation_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_reservation(request) + client.delete_capacity_commitment(request) @pytest.mark.parametrize( "request_type", [ - reservation.DeleteReservationRequest, + reservation.DeleteCapacityCommitmentRequest, dict, ], ) -def test_delete_reservation_rest_call_success(request_type): +def test_delete_capacity_commitment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -15867,14 +21007,14 @@ def test_delete_reservation_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_reservation(request) + response = client.delete_capacity_commitment(request) # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_reservation_rest_interceptors(null_interceptor): +def test_delete_capacity_commitment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -15888,11 +21028,11 @@ def test_delete_reservation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_delete_reservation" + transports.ReservationServiceRestInterceptor, "pre_delete_capacity_commitment" ) as pre: pre.assert_not_called() - pb_message = reservation.DeleteReservationRequest.pb( - reservation.DeleteReservationRequest() + pb_message = reservation.DeleteCapacityCommitmentRequest.pb( + reservation.DeleteCapacityCommitmentRequest() ) transcode.return_value = { "method": "post", @@ -15905,14 +21045,14 @@ def test_delete_reservation_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - request = reservation.DeleteReservationRequest() + request = reservation.DeleteCapacityCommitmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_reservation( + client.delete_capacity_commitment( request, metadata=[ ("key", "val"), @@ -15923,16 +21063,16 @@ def test_delete_reservation_rest_interceptors(null_interceptor): pre.assert_called_once() -def test_update_reservation_rest_bad_request( - request_type=gcbr_reservation.UpdateReservationRequest, +def test_update_capacity_commitment_rest_bad_request( + request_type=reservation.UpdateCapacityCommitmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - 
"reservation": { - "name": "projects/sample1/locations/sample2/reservations/sample3" + "capacity_commitment": { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" } } request = request_type(**request_init) @@ -15949,64 +21089,57 @@ def test_update_reservation_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_reservation(request) + client.update_capacity_commitment(request) @pytest.mark.parametrize( "request_type", [ - gcbr_reservation.UpdateReservationRequest, + reservation.UpdateCapacityCommitmentRequest, dict, ], ) -def test_update_reservation_rest_call_success(request_type): +def test_update_capacity_commitment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "reservation": { - "name": "projects/sample1/locations/sample2/reservations/sample3" + "capacity_commitment": { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" } } - request_init["reservation"] = { - "name": "projects/sample1/locations/sample2/reservations/sample3", - "slot_capacity": 1391, - "ignore_idle_slots": True, - "autoscale": {"current_slots": 1431, "max_slots": 986}, - "concurrency": 1195, - "creation_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, + request_init["capacity_commitment"] = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3", + "slot_count": 1098, + "plan": 3, + "state": 1, + "commitment_start_time": {"seconds": 751, "nanos": 543}, + "commitment_end_time": {}, + "failure_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "renewal_plan": 3, "multi_region_auxiliary": True, "edition": 1, 
- "primary_location": "primary_location_value", - "secondary_location": "secondary_location_value", - "original_primary_location": "original_primary_location_value", - "max_slots": 986, - "scaling_mode": 1, - "replication_status": { - "error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "last_error_time": {}, - "last_replication_time": {}, - "soft_failover_start_time": {}, - }, + "is_flat_rate": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = gcbr_reservation.UpdateReservationRequest.meta.fields["reservation"] + test_field = reservation.UpdateCapacityCommitmentRequest.meta.fields[ + "capacity_commitment" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -16034,7 +21167,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["reservation"].items(): # pragma: NO COVER + for field, value in request_init["capacity_commitment"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -16064,27 +21197,24 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["reservation"][field])): - del request_init["reservation"][field][i][subfield] + for i in range(0, len(request_init["capacity_commitment"][field])): + del 
request_init["capacity_commitment"][field][i][subfield] else: - del request_init["reservation"][field][subfield] + del request_init["capacity_commitment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = gcbr_reservation.Reservation( + return_value = reservation.CapacityCommitment( name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, + slot_count=1098, + plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, + state=reservation.CapacityCommitment.State.PENDING, + renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, multi_region_auxiliary=True, - edition=gcbr_reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, + edition=reservation.Edition.STANDARD, + is_flat_rate=True, ) # Wrap the value into a proper Response obj @@ -16092,32 +21222,27 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = gcbr_reservation.Reservation.pb(return_value) + return_value = reservation.CapacityCommitment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_reservation(request) + response = client.update_capacity_commitment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, gcbr_reservation.Reservation) + assert isinstance(response, reservation.CapacityCommitment) assert response.name == "name_value" - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 + assert response.slot_count == 1098 + assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX + assert response.state == reservation.CapacityCommitment.State.PENDING + assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX assert response.multi_region_auxiliary is True - assert response.edition == gcbr_reservation.Edition.STANDARD - assert response.primary_location == "primary_location_value" - assert response.secondary_location == "secondary_location_value" - assert response.original_primary_location == "original_primary_location_value" - assert response.max_slots == 986 - assert ( - response.scaling_mode == gcbr_reservation.Reservation.ScalingMode.AUTOSCALE_ONLY - ) + assert response.edition == reservation.Edition.STANDARD + assert response.is_flat_rate is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_reservation_rest_interceptors(null_interceptor): +def test_update_capacity_commitment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16131,18 +21256,18 @@ def test_update_reservation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_update_reservation" + transports.ReservationServiceRestInterceptor, "post_update_capacity_commitment" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_update_reservation_with_metadata", + "post_update_capacity_commitment_with_metadata", ) as post_with_metadata, mock.patch.object( - 
transports.ReservationServiceRestInterceptor, "pre_update_reservation" + transports.ReservationServiceRestInterceptor, "pre_update_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = gcbr_reservation.UpdateReservationRequest.pb( - gcbr_reservation.UpdateReservationRequest() + pb_message = reservation.UpdateCapacityCommitmentRequest.pb( + reservation.UpdateCapacityCommitmentRequest() ) transcode.return_value = { "method": "post", @@ -16154,21 +21279,21 @@ def test_update_reservation_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = gcbr_reservation.Reservation.to_json( - gcbr_reservation.Reservation() + return_value = reservation.CapacityCommitment.to_json( + reservation.CapacityCommitment() ) req.return_value.content = return_value - request = gcbr_reservation.UpdateReservationRequest() + request = reservation.UpdateCapacityCommitmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = gcbr_reservation.Reservation() - post_with_metadata.return_value = gcbr_reservation.Reservation(), metadata + post.return_value = reservation.CapacityCommitment() + post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - client.update_reservation( + client.update_capacity_commitment( request, metadata=[ ("key", "val"), @@ -16181,14 +21306,16 @@ def test_update_reservation_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_failover_reservation_rest_bad_request( - request_type=reservation.FailoverReservationRequest, +def test_split_capacity_commitment_rest_bad_request( + request_type=reservation.SplitCapacityCommitmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # 
send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16203,71 +21330,50 @@ def test_failover_reservation_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.failover_reservation(request) + client.split_capacity_commitment(request) @pytest.mark.parametrize( "request_type", [ - reservation.FailoverReservationRequest, + reservation.SplitCapacityCommitmentRequest, dict, ], ) -def test_failover_reservation_rest_call_success(request_type): +def test_split_capacity_commitment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.Reservation( - name="name_value", - slot_capacity=1391, - ignore_idle_slots=True, - concurrency=1195, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - primary_location="primary_location_value", - secondary_location="secondary_location_value", - original_primary_location="original_primary_location_value", - max_slots=986, - scaling_mode=reservation.Reservation.ScalingMode.AUTOSCALE_ONLY, - ) + return_value = reservation.SplitCapacityCommitmentResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.Reservation.pb(return_value) + return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.failover_reservation(request) + response = client.split_capacity_commitment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.Reservation) - assert response.name == "name_value" - assert response.slot_capacity == 1391 - assert response.ignore_idle_slots is True - assert response.concurrency == 1195 - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.primary_location == "primary_location_value" - assert response.secondary_location == "secondary_location_value" - assert response.original_primary_location == "original_primary_location_value" - assert response.max_slots == 986 - assert response.scaling_mode == reservation.Reservation.ScalingMode.AUTOSCALE_ONLY + assert isinstance(response, reservation.SplitCapacityCommitmentResponse) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_failover_reservation_rest_interceptors(null_interceptor): +def test_split_capacity_commitment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16281,18 +21387,18 @@ def test_failover_reservation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_failover_reservation" + transports.ReservationServiceRestInterceptor, "post_split_capacity_commitment" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_failover_reservation_with_metadata", + "post_split_capacity_commitment_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_failover_reservation" + transports.ReservationServiceRestInterceptor, "pre_split_capacity_commitment" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.FailoverReservationRequest.pb( - reservation.FailoverReservationRequest() + pb_message = 
reservation.SplitCapacityCommitmentRequest.pb( + reservation.SplitCapacityCommitmentRequest() ) transcode.return_value = { "method": "post", @@ -16304,19 +21410,24 @@ def test_failover_reservation_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Reservation.to_json(reservation.Reservation()) + return_value = reservation.SplitCapacityCommitmentResponse.to_json( + reservation.SplitCapacityCommitmentResponse() + ) req.return_value.content = return_value - request = reservation.FailoverReservationRequest() + request = reservation.SplitCapacityCommitmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.Reservation() - post_with_metadata.return_value = reservation.Reservation(), metadata + post.return_value = reservation.SplitCapacityCommitmentResponse() + post_with_metadata.return_value = ( + reservation.SplitCapacityCommitmentResponse(), + metadata, + ) - client.failover_reservation( + client.split_capacity_commitment( request, metadata=[ ("key", "val"), @@ -16329,8 +21440,8 @@ def test_failover_reservation_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_capacity_commitment_rest_bad_request( - request_type=reservation.CreateCapacityCommitmentRequest, +def test_merge_capacity_commitments_rest_bad_request( + request_type=reservation.MergeCapacityCommitmentsRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -16351,114 +21462,23 @@ def test_create_capacity_commitment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_capacity_commitment(request) + client.merge_capacity_commitments(request) 
@pytest.mark.parametrize( "request_type", [ - reservation.CreateCapacityCommitmentRequest, + reservation.MergeCapacityCommitmentsRequest, dict, ], ) -def test_create_capacity_commitment_rest_call_success(request_type): +def test_merge_capacity_commitments_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["capacity_commitment"] = { - "name": "name_value", - "slot_count": 1098, - "plan": 3, - "state": 1, - "commitment_start_time": {"seconds": 751, "nanos": 543}, - "commitment_end_time": {}, - "failure_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "renewal_plan": 3, - "multi_region_auxiliary": True, - "edition": 1, - "is_flat_rate": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = reservation.CreateCapacityCommitmentRequest.meta.fields[ - "capacity_commitment" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["capacity_commitment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["capacity_commitment"][field])): - del 
request_init["capacity_commitment"][field][i][subfield] - else: - del request_init["capacity_commitment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -16485,7 +21505,7 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_capacity_commitment(request) + response = client.merge_capacity_commitments(request) # Establish that the response is the type that we expect. assert isinstance(response, reservation.CapacityCommitment) @@ -16500,7 +21520,7 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_capacity_commitment_rest_interceptors(null_interceptor): +def test_merge_capacity_commitments_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16514,18 +21534,18 @@ def test_create_capacity_commitment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_create_capacity_commitment" + transports.ReservationServiceRestInterceptor, "post_merge_capacity_commitments" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_create_capacity_commitment_with_metadata", + "post_merge_capacity_commitments_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_create_capacity_commitment" + transports.ReservationServiceRestInterceptor, "pre_merge_capacity_commitments" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.CreateCapacityCommitmentRequest.pb( - 
reservation.CreateCapacityCommitmentRequest() + pb_message = reservation.MergeCapacityCommitmentsRequest.pb( + reservation.MergeCapacityCommitmentsRequest() ) transcode.return_value = { "method": "post", @@ -16542,7 +21562,7 @@ def test_create_capacity_commitment_rest_interceptors(null_interceptor): ) req.return_value.content = return_value - request = reservation.CreateCapacityCommitmentRequest() + request = reservation.MergeCapacityCommitmentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -16551,7 +21571,7 @@ def test_create_capacity_commitment_rest_interceptors(null_interceptor): post.return_value = reservation.CapacityCommitment() post_with_metadata.return_value = reservation.CapacityCommitment(), metadata - client.create_capacity_commitment( + client.merge_capacity_commitments( request, metadata=[ ("key", "val"), @@ -16564,14 +21584,14 @@ def test_create_capacity_commitment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_capacity_commitments_rest_bad_request( - request_type=reservation.ListCapacityCommitmentsRequest, +def test_create_assignment_rest_bad_request( + request_type=reservation.CreateAssignmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16586,30 +21606,109 @@ def test_list_capacity_commitments_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_capacity_commitments(request) + client.create_assignment(request) @pytest.mark.parametrize( "request_type", [ - reservation.ListCapacityCommitmentsRequest, + reservation.CreateAssignmentRequest, dict, ], ) -def test_list_capacity_commitments_rest_call_success(request_type): +def test_create_assignment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request_init["assignment"] = { + "name": "name_value", + "assignee": "assignee_value", + "job_type": 1, + "state": 1, + "enable_gemini_in_bigquery": True, + "scheduling_policy": {"concurrency": 1195, "max_slots": 986}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = reservation.CreateAssignmentRequest.meta.fields["assignment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["assignment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["assignment"][field])): + del request_init["assignment"][field][i][subfield] + else: + del 
request_init["assignment"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.ListCapacityCommitmentsResponse( - next_page_token="next_page_token_value", + return_value = reservation.Assignment( + name="name_value", + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + enable_gemini_in_bigquery=True, ) # Wrap the value into a proper Response obj @@ -16617,20 +21716,24 @@ def test_list_capacity_commitments_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.ListCapacityCommitmentsResponse.pb(return_value) + return_value = reservation.Assignment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_capacity_commitments(request) + response = client.create_assignment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListCapacityCommitmentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, reservation.Assignment) + assert response.name == "name_value" + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + assert response.enable_gemini_in_bigquery is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_capacity_commitments_rest_interceptors(null_interceptor): +def test_create_assignment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16644,18 +21747,18 @@ def test_list_capacity_commitments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_list_capacity_commitments" + transports.ReservationServiceRestInterceptor, "post_create_assignment" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_list_capacity_commitments_with_metadata", + "post_create_assignment_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_list_capacity_commitments" + transports.ReservationServiceRestInterceptor, "pre_create_assignment" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.ListCapacityCommitmentsRequest.pb( - reservation.ListCapacityCommitmentsRequest() + pb_message = reservation.CreateAssignmentRequest.pb( + reservation.CreateAssignmentRequest() ) transcode.return_value = { "method": "post", @@ -16667,24 +21770,19 @@ def test_list_capacity_commitments_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.ListCapacityCommitmentsResponse.to_json( - reservation.ListCapacityCommitmentsResponse() - ) + return_value = reservation.Assignment.to_json(reservation.Assignment()) req.return_value.content = return_value - request = reservation.ListCapacityCommitmentsRequest() + request = reservation.CreateAssignmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.ListCapacityCommitmentsResponse() - post_with_metadata.return_value = ( - reservation.ListCapacityCommitmentsResponse(), - metadata, - ) + post.return_value = reservation.Assignment() + post_with_metadata.return_value = reservation.Assignment(), metadata - client.list_capacity_commitments( + client.create_assignment( request, metadata=[ ("key", "val"), @@ -16697,16 +21795,14 @@ def test_list_capacity_commitments_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_capacity_commitment_rest_bad_request( - request_type=reservation.GetCapacityCommitmentRequest, +def test_list_assignments_rest_bad_request( + request_type=reservation.ListAssignmentsRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16721,39 +21817,30 @@ def test_get_capacity_commitment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_capacity_commitment(request) + client.list_assignments(request) @pytest.mark.parametrize( "request_type", [ - reservation.GetCapacityCommitmentRequest, + reservation.ListAssignmentsRequest, dict, ], ) -def test_get_capacity_commitment_rest_call_success(request_type): +def test_list_assignments_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.CapacityCommitment( - name="name_value", - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, + return_value = reservation.ListAssignmentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -16761,27 +21848,20 @@ def test_get_capacity_commitment_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) + return_value = reservation.ListAssignmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_capacity_commitment(request) + response = client.list_assignments(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.CapacityCommitment) - assert response.name == "name_value" - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True + assert isinstance(response, pagers.ListAssignmentsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_capacity_commitment_rest_interceptors(null_interceptor): +def test_list_assignments_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16795,18 +21875,18 @@ def test_get_capacity_commitment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_get_capacity_commitment" + transports.ReservationServiceRestInterceptor, "post_list_assignments" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_get_capacity_commitment_with_metadata", + "post_list_assignments_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_get_capacity_commitment" + transports.ReservationServiceRestInterceptor, "pre_list_assignments" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.GetCapacityCommitmentRequest.pb( - reservation.GetCapacityCommitmentRequest() + pb_message = reservation.ListAssignmentsRequest.pb( + reservation.ListAssignmentsRequest() ) transcode.return_value = { "method": "post", @@ 
-16818,21 +21898,24 @@ def test_get_capacity_commitment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.CapacityCommitment.to_json( - reservation.CapacityCommitment() + return_value = reservation.ListAssignmentsResponse.to_json( + reservation.ListAssignmentsResponse() ) req.return_value.content = return_value - request = reservation.GetCapacityCommitmentRequest() + request = reservation.ListAssignmentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.CapacityCommitment() - post_with_metadata.return_value = reservation.CapacityCommitment(), metadata + post.return_value = reservation.ListAssignmentsResponse() + post_with_metadata.return_value = ( + reservation.ListAssignmentsResponse(), + metadata, + ) - client.get_capacity_commitment( + client.list_assignments( request, metadata=[ ("key", "val"), @@ -16845,15 +21928,15 @@ def test_get_capacity_commitment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_capacity_commitment_rest_bad_request( - request_type=reservation.DeleteCapacityCommitmentRequest, +def test_delete_assignment_rest_bad_request( + request_type=reservation.DeleteAssignmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" } request = request_type(**request_init) @@ -16869,24 +21952,24 @@ def test_delete_capacity_commitment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} - client.delete_capacity_commitment(request) + client.delete_assignment(request) @pytest.mark.parametrize( "request_type", [ - reservation.DeleteCapacityCommitmentRequest, + reservation.DeleteAssignmentRequest, dict, ], ) -def test_delete_capacity_commitment_rest_call_success(request_type): +def test_delete_assignment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" } request = request_type(**request_init) @@ -16902,14 +21985,14 @@ def test_delete_capacity_commitment_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_capacity_commitment(request) + response = client.delete_assignment(request) # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_capacity_commitment_rest_interceptors(null_interceptor): +def test_delete_assignment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -16923,11 +22006,11 @@ def test_delete_capacity_commitment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_delete_capacity_commitment" + transports.ReservationServiceRestInterceptor, "pre_delete_assignment" ) as pre: pre.assert_not_called() - pb_message = reservation.DeleteCapacityCommitmentRequest.pb( - reservation.DeleteCapacityCommitmentRequest() + pb_message = reservation.DeleteAssignmentRequest.pb( + reservation.DeleteAssignmentRequest() ) transcode.return_value = { "method": "post", @@ -16940,14 +22023,14 @@ def test_delete_capacity_commitment_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - request = reservation.DeleteCapacityCommitmentRequest() + request = reservation.DeleteAssignmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_capacity_commitment( + client.delete_assignment( request, metadata=[ ("key", "val"), @@ -16958,158 +22041,52 @@ def test_delete_capacity_commitment_rest_interceptors(null_interceptor): pre.assert_called_once() -def test_update_capacity_commitment_rest_bad_request( - request_type=reservation.UpdateCapacityCommitmentRequest, -): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "capacity_commitment": { - "name": 
"projects/sample1/locations/sample2/capacityCommitments/sample3" - } - } - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_capacity_commitment(request) - - -@pytest.mark.parametrize( - "request_type", - [ - reservation.UpdateCapacityCommitmentRequest, - dict, - ], -) -def test_update_capacity_commitment_rest_call_success(request_type): - client = ReservationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "capacity_commitment": { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } - } - request_init["capacity_commitment"] = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3", - "slot_count": 1098, - "plan": 3, - "state": 1, - "commitment_start_time": {"seconds": 751, "nanos": 543}, - "commitment_end_time": {}, - "failure_status": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "renewal_plan": 3, - "multi_region_auxiliary": True, - "edition": 1, - "is_flat_rate": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = reservation.UpdateCapacityCommitmentRequest.meta.fields[ - "capacity_commitment" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_search_assignments_rest_bad_request( + request_type=reservation.SearchAssignmentsRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - subfields_not_in_runtime = [] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.search_assignments(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["capacity_commitment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + reservation.SearchAssignmentsRequest, + dict, + ], +) +def test_search_assignments_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if 
field_repeated: - for i in range(0, len(request_init["capacity_commitment"][field])): - del request_init["capacity_commitment"][field][i][subfield] - else: - del request_init["capacity_commitment"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.CapacityCommitment( - name="name_value", - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, + return_value = reservation.SearchAssignmentsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -17117,27 +22094,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) + return_value = reservation.SearchAssignmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_capacity_commitment(request) + response = client.search_assignments(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.CapacityCommitment) - assert response.name == "name_value" - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True + assert isinstance(response, pagers.SearchAssignmentsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_capacity_commitment_rest_interceptors(null_interceptor): +def test_search_assignments_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17151,18 +22121,18 @@ def test_update_capacity_commitment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_update_capacity_commitment" + transports.ReservationServiceRestInterceptor, "post_search_assignments" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_update_capacity_commitment_with_metadata", + "post_search_assignments_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_update_capacity_commitment" + transports.ReservationServiceRestInterceptor, "pre_search_assignments" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.UpdateCapacityCommitmentRequest.pb( - reservation.UpdateCapacityCommitmentRequest() + pb_message = reservation.SearchAssignmentsRequest.pb( + reservation.SearchAssignmentsRequest() ) 
transcode.return_value = { "method": "post", @@ -17174,21 +22144,24 @@ def test_update_capacity_commitment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.CapacityCommitment.to_json( - reservation.CapacityCommitment() + return_value = reservation.SearchAssignmentsResponse.to_json( + reservation.SearchAssignmentsResponse() ) req.return_value.content = return_value - request = reservation.UpdateCapacityCommitmentRequest() + request = reservation.SearchAssignmentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.CapacityCommitment() - post_with_metadata.return_value = reservation.CapacityCommitment(), metadata + post.return_value = reservation.SearchAssignmentsResponse() + post_with_metadata.return_value = ( + reservation.SearchAssignmentsResponse(), + metadata, + ) - client.update_capacity_commitment( + client.search_assignments( request, metadata=[ ("key", "val"), @@ -17201,16 +22174,14 @@ def test_update_capacity_commitment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_split_capacity_commitment_rest_bad_request( - request_type=reservation.SplitCapacityCommitmentRequest, +def test_search_all_assignments_rest_bad_request( + request_type=reservation.SearchAllAssignmentsRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -17225,50 +22196,51 @@ def test_split_capacity_commitment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.split_capacity_commitment(request) + client.search_all_assignments(request) @pytest.mark.parametrize( "request_type", [ - reservation.SplitCapacityCommitmentRequest, + reservation.SearchAllAssignmentsRequest, dict, ], ) -def test_split_capacity_commitment_rest_call_success(request_type): +def test_search_all_assignments_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/capacityCommitments/sample3" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.SplitCapacityCommitmentResponse() + return_value = reservation.SearchAllAssignmentsResponse( + next_page_token="next_page_token_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.SplitCapacityCommitmentResponse.pb(return_value) + return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.split_capacity_commitment(request) + response = client.search_all_assignments(request) # Establish that the response is the type that we expect. - assert isinstance(response, reservation.SplitCapacityCommitmentResponse) + assert isinstance(response, pagers.SearchAllAssignmentsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_split_capacity_commitment_rest_interceptors(null_interceptor): +def test_search_all_assignments_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17282,18 +22254,18 @@ def test_split_capacity_commitment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_split_capacity_commitment" + transports.ReservationServiceRestInterceptor, "post_search_all_assignments" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_split_capacity_commitment_with_metadata", + "post_search_all_assignments_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, 
"pre_split_capacity_commitment" + transports.ReservationServiceRestInterceptor, "pre_search_all_assignments" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.SplitCapacityCommitmentRequest.pb( - reservation.SplitCapacityCommitmentRequest() + pb_message = reservation.SearchAllAssignmentsRequest.pb( + reservation.SearchAllAssignmentsRequest() ) transcode.return_value = { "method": "post", @@ -17305,24 +22277,24 @@ def test_split_capacity_commitment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.SplitCapacityCommitmentResponse.to_json( - reservation.SplitCapacityCommitmentResponse() + return_value = reservation.SearchAllAssignmentsResponse.to_json( + reservation.SearchAllAssignmentsResponse() ) req.return_value.content = return_value - request = reservation.SplitCapacityCommitmentRequest() + request = reservation.SearchAllAssignmentsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.SplitCapacityCommitmentResponse() + post.return_value = reservation.SearchAllAssignmentsResponse() post_with_metadata.return_value = ( - reservation.SplitCapacityCommitmentResponse(), + reservation.SearchAllAssignmentsResponse(), metadata, ) - client.split_capacity_commitment( + client.search_all_assignments( request, metadata=[ ("key", "val"), @@ -17335,14 +22307,16 @@ def test_split_capacity_commitment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_merge_capacity_commitments_rest_bad_request( - request_type=reservation.MergeCapacityCommitmentsRequest, +def test_move_assignment_rest_bad_request( + request_type=reservation.MoveAssignmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17357,37 +22331,36 @@ def test_merge_capacity_commitments_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.merge_capacity_commitments(request) + client.move_assignment(request) @pytest.mark.parametrize( "request_type", [ - reservation.MergeCapacityCommitmentsRequest, + reservation.MoveAssignmentRequest, dict, ], ) -def test_merge_capacity_commitments_rest_call_success(request_type): +def test_move_assignment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.CapacityCommitment( + return_value = reservation.Assignment( name="name_value", - slot_count=1098, - plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - state=reservation.CapacityCommitment.State.PENDING, - renewal_plan=reservation.CapacityCommitment.CommitmentPlan.FLEX, - multi_region_auxiliary=True, - edition=reservation.Edition.STANDARD, - is_flat_rate=True, + assignee="assignee_value", + job_type=reservation.Assignment.JobType.PIPELINE, + state=reservation.Assignment.State.PENDING, + enable_gemini_in_bigquery=True, ) # Wrap the value into a proper Response obj @@ -17395,27 +22368,24 @@ def test_merge_capacity_commitments_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.CapacityCommitment.pb(return_value) + return_value = reservation.Assignment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.merge_capacity_commitments(request) + response = client.move_assignment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.CapacityCommitment) + assert isinstance(response, reservation.Assignment) assert response.name == "name_value" - assert response.slot_count == 1098 - assert response.plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.state == reservation.CapacityCommitment.State.PENDING - assert response.renewal_plan == reservation.CapacityCommitment.CommitmentPlan.FLEX - assert response.multi_region_auxiliary is True - assert response.edition == reservation.Edition.STANDARD - assert response.is_flat_rate is True + assert response.assignee == "assignee_value" + assert response.job_type == reservation.Assignment.JobType.PIPELINE + assert response.state == reservation.Assignment.State.PENDING + assert response.enable_gemini_in_bigquery is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_merge_capacity_commitments_rest_interceptors(null_interceptor): +def test_move_assignment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17429,18 +22399,18 @@ def test_merge_capacity_commitments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_merge_capacity_commitments" + transports.ReservationServiceRestInterceptor, "post_move_assignment" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_merge_capacity_commitments_with_metadata", + "post_move_assignment_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_merge_capacity_commitments" + transports.ReservationServiceRestInterceptor, "pre_move_assignment" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.MergeCapacityCommitmentsRequest.pb( - 
reservation.MergeCapacityCommitmentsRequest() + pb_message = reservation.MoveAssignmentRequest.pb( + reservation.MoveAssignmentRequest() ) transcode.return_value = { "method": "post", @@ -17452,21 +22422,19 @@ def test_merge_capacity_commitments_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.CapacityCommitment.to_json( - reservation.CapacityCommitment() - ) + return_value = reservation.Assignment.to_json(reservation.Assignment()) req.return_value.content = return_value - request = reservation.MergeCapacityCommitmentsRequest() + request = reservation.MoveAssignmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.CapacityCommitment() - post_with_metadata.return_value = reservation.CapacityCommitment(), metadata + post.return_value = reservation.Assignment() + post_with_metadata.return_value = reservation.Assignment(), metadata - client.merge_capacity_commitments( + client.move_assignment( request, metadata=[ ("key", "val"), @@ -17479,14 +22447,18 @@ def test_merge_capacity_commitments_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_assignment_rest_bad_request( - request_type=reservation.CreateAssignmentRequest, +def test_update_assignment_rest_bad_request( + request_type=reservation.UpdateAssignmentRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "assignment": { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a 
BadRequest error. @@ -17501,36 +22473,41 @@ def test_create_assignment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_assignment(request) + client.update_assignment(request) @pytest.mark.parametrize( "request_type", [ - reservation.CreateAssignmentRequest, + reservation.UpdateAssignmentRequest, dict, ], ) -def test_create_assignment_rest_call_success(request_type): +def test_update_assignment_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = { + "assignment": { + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + } + } request_init["assignment"] = { - "name": "name_value", + "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4", "assignee": "assignee_value", "job_type": 1, "state": 1, "enable_gemini_in_bigquery": True, + "scheduling_policy": {"concurrency": 1195, "max_slots": 986}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = reservation.CreateAssignmentRequest.meta.fields["assignment"] + test_field = reservation.UpdateAssignmentRequest.meta.fields["assignment"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -17615,7 +22592,7 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_assignment(request) + response = client.update_assignment(request) # Establish that the response is the type that we expect. assert isinstance(response, reservation.Assignment) @@ -17627,7 +22604,7 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_assignment_rest_interceptors(null_interceptor): +def test_update_assignment_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17641,18 +22618,18 @@ def test_create_assignment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_create_assignment" + transports.ReservationServiceRestInterceptor, "post_update_assignment" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_create_assignment_with_metadata", + "post_update_assignment_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_create_assignment" + transports.ReservationServiceRestInterceptor, "pre_update_assignment" ) as pre: pre.assert_not_called() post.assert_not_called() 
post_with_metadata.assert_not_called() - pb_message = reservation.CreateAssignmentRequest.pb( - reservation.CreateAssignmentRequest() + pb_message = reservation.UpdateAssignmentRequest.pb( + reservation.UpdateAssignmentRequest() ) transcode.return_value = { "method": "post", @@ -17667,7 +22644,7 @@ def test_create_assignment_rest_interceptors(null_interceptor): return_value = reservation.Assignment.to_json(reservation.Assignment()) req.return_value.content = return_value - request = reservation.CreateAssignmentRequest() + request = reservation.UpdateAssignmentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -17676,7 +22653,7 @@ def test_create_assignment_rest_interceptors(null_interceptor): post.return_value = reservation.Assignment() post_with_metadata.return_value = reservation.Assignment(), metadata - client.create_assignment( + client.update_assignment( request, metadata=[ ("key", "val"), @@ -17689,14 +22666,14 @@ def test_create_assignment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_assignments_rest_bad_request( - request_type=reservation.ListAssignmentsRequest, +def test_get_bi_reservation_rest_bad_request( + request_type=reservation.GetBiReservationRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/biReservation"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -17711,30 +22688,31 @@ def test_list_assignments_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_assignments(request) + client.get_bi_reservation(request) @pytest.mark.parametrize( "request_type", [ - reservation.ListAssignmentsRequest, + reservation.GetBiReservationRequest, dict, ], ) -def test_list_assignments_rest_call_success(request_type): +def test_get_bi_reservation_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/reservations/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/biReservation"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.ListAssignmentsResponse( - next_page_token="next_page_token_value", + return_value = reservation.BiReservation( + name="name_value", + size=443, ) # Wrap the value into a proper Response obj @@ -17742,20 +22720,21 @@ def test_list_assignments_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.ListAssignmentsResponse.pb(return_value) + return_value = reservation.BiReservation.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_assignments(request) + response = client.get_bi_reservation(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAssignmentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assignments_rest_interceptors(null_interceptor): +def test_get_bi_reservation_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17769,18 +22748,18 @@ def test_list_assignments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_list_assignments" + transports.ReservationServiceRestInterceptor, "post_get_bi_reservation" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_list_assignments_with_metadata", + "post_get_bi_reservation_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_list_assignments" + transports.ReservationServiceRestInterceptor, "pre_get_bi_reservation" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.ListAssignmentsRequest.pb( - reservation.ListAssignmentsRequest() + pb_message = reservation.GetBiReservationRequest.pb( + reservation.GetBiReservationRequest() ) transcode.return_value = { "method": "post", @@ -17792,24 +22771,19 @@ def test_list_assignments_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.ListAssignmentsResponse.to_json( - reservation.ListAssignmentsResponse() - ) + return_value = reservation.BiReservation.to_json(reservation.BiReservation()) 
req.return_value.content = return_value - request = reservation.ListAssignmentsRequest() + request = reservation.GetBiReservationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.ListAssignmentsResponse() - post_with_metadata.return_value = ( - reservation.ListAssignmentsResponse(), - metadata, - ) + post.return_value = reservation.BiReservation() + post_with_metadata.return_value = reservation.BiReservation(), metadata - client.list_assignments( + client.get_bi_reservation( request, metadata=[ ("key", "val"), @@ -17822,15 +22796,15 @@ def test_list_assignments_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_assignment_rest_bad_request( - request_type=reservation.DeleteAssignmentRequest, +def test_update_bi_reservation_rest_bad_request( + request_type=reservation.UpdateBiReservationRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + "bi_reservation": {"name": "projects/sample1/locations/sample2/biReservation"} } request = request_type(**request_init) @@ -17846,47 +22820,134 @@ def test_delete_assignment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_assignment(request) + client.update_bi_reservation(request) @pytest.mark.parametrize( "request_type", [ - reservation.DeleteAssignmentRequest, + reservation.UpdateBiReservationRequest, dict, ], ) -def test_delete_assignment_rest_call_success(request_type): +def test_update_bi_reservation_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that 
will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + "bi_reservation": {"name": "projects/sample1/locations/sample2/biReservation"} + } + request_init["bi_reservation"] = { + "name": "projects/sample1/locations/sample2/biReservation", + "update_time": {"seconds": 751, "nanos": 543}, + "size": 443, + "preferred_tables": [ + { + "project_id": "project_id_value", + "dataset_id": "dataset_id_value", + "table_id": "table_id_value", + } + ], } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = reservation.UpdateBiReservationRequest.meta.fields["bi_reservation"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["bi_reservation"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["bi_reservation"][field])): + del request_init["bi_reservation"][field][i][subfield] + else: + 
del request_init["bi_reservation"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = reservation.BiReservation( + name="name_value", + size=443, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = reservation.BiReservation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_assignment(request) + response = client.update_bi_reservation(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, reservation.BiReservation) + assert response.name == "name_value" + assert response.size == 443 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_assignment_rest_interceptors(null_interceptor): +def test_update_bi_reservation_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -17900,11 +22961,18 @@ def test_delete_assignment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_delete_assignment" + transports.ReservationServiceRestInterceptor, "post_update_bi_reservation" + ) as post, mock.patch.object( + transports.ReservationServiceRestInterceptor, + "post_update_bi_reservation_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_update_bi_reservation" ) as pre: pre.assert_not_called() - pb_message = reservation.DeleteAssignmentRequest.pb( - reservation.DeleteAssignmentRequest() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = reservation.UpdateBiReservationRequest.pb( + reservation.UpdateBiReservationRequest() ) transcode.return_value = { "method": "post", @@ -17916,15 +22984,19 @@ def test_delete_assignment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = reservation.BiReservation.to_json(reservation.BiReservation()) + req.return_value.content = return_value - request = reservation.DeleteAssignmentRequest() + request = reservation.UpdateBiReservationRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = reservation.BiReservation() + 
post_with_metadata.return_value = reservation.BiReservation(), metadata - client.delete_assignment( + client.update_bi_reservation( request, metadata=[ ("key", "val"), @@ -17933,16 +23005,20 @@ def test_delete_assignment_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_search_assignments_rest_bad_request( - request_type=reservation.SearchAssignmentsRequest, +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "resource": "projects/sample1/locations/sample2/reservations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -17957,51 +23033,52 @@ def test_search_assignments_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_assignments(request) + client.get_iam_policy(request) @pytest.mark.parametrize( "request_type", [ - reservation.SearchAssignmentsRequest, + iam_policy_pb2.GetIamPolicyRequest, dict, ], ) -def test_search_assignments_rest_call_success(request_type): +def test_get_iam_policy_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "resource": "projects/sample1/locations/sample2/reservations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.SearchAssignmentsResponse( - next_page_token="next_page_token_value", + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.SearchAssignmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_assignments(request) + response = client.get_iam_policy(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAssignmentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_assignments_rest_interceptors(null_interceptor): +def test_get_iam_policy_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18015,19 +23092,17 @@ def test_search_assignments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_search_assignments" + transports.ReservationServiceRestInterceptor, "post_get_iam_policy" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_search_assignments_with_metadata", + "post_get_iam_policy_with_metadata", ) as post_with_metadata, mock.patch.object( - 
transports.ReservationServiceRestInterceptor, "pre_search_assignments" + transports.ReservationServiceRestInterceptor, "pre_get_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.SearchAssignmentsRequest.pb( - reservation.SearchAssignmentsRequest() - ) + pb_message = iam_policy_pb2.GetIamPolicyRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18036,26 +23111,21 @@ def test_search_assignments_rest_interceptors(null_interceptor): } req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.SearchAssignmentsResponse.to_json( - reservation.SearchAssignmentsResponse() - ) + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(policy_pb2.Policy()) req.return_value.content = return_value - request = reservation.SearchAssignmentsRequest() + request = iam_policy_pb2.GetIamPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.SearchAssignmentsResponse() - post_with_metadata.return_value = ( - reservation.SearchAssignmentsResponse(), - metadata, - ) + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata - client.search_assignments( + client.get_iam_policy( request, metadata=[ ("key", "val"), @@ -18068,14 +23138,16 @@ def test_search_assignments_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_search_all_assignments_rest_bad_request( - request_type=reservation.SearchAllAssignmentsRequest, +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "resource": "projects/sample1/locations/sample2/reservations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18090,51 +23162,52 @@ def test_search_all_assignments_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_all_assignments(request) + client.set_iam_policy(request) @pytest.mark.parametrize( "request_type", [ - reservation.SearchAllAssignmentsRequest, + iam_policy_pb2.SetIamPolicyRequest, dict, ], ) -def test_search_all_assignments_rest_call_success(request_type): +def test_set_iam_policy_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "resource": "projects/sample1/locations/sample2/reservations/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.SearchAllAssignmentsResponse( - next_page_token="next_page_token_value", + return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.SearchAllAssignmentsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_all_assignments(request) + response = client.set_iam_policy(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllAssignmentsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, policy_pb2.Policy) + assert response.version == 774 + assert response.etag == b"etag_blob" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_assignments_rest_interceptors(null_interceptor): +def test_set_iam_policy_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18148,19 +23221,17 @@ def test_search_all_assignments_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_search_all_assignments" + transports.ReservationServiceRestInterceptor, "post_set_iam_policy" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_search_all_assignments_with_metadata", + "post_set_iam_policy_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_search_all_assignments" + 
transports.ReservationServiceRestInterceptor, "pre_set_iam_policy" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.SearchAllAssignmentsRequest.pb( - reservation.SearchAllAssignmentsRequest() - ) + pb_message = iam_policy_pb2.SetIamPolicyRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18171,24 +23242,19 @@ def test_search_all_assignments_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.SearchAllAssignmentsResponse.to_json( - reservation.SearchAllAssignmentsResponse() - ) + return_value = json_format.MessageToJson(policy_pb2.Policy()) req.return_value.content = return_value - request = reservation.SearchAllAssignmentsRequest() + request = iam_policy_pb2.SetIamPolicyRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.SearchAllAssignmentsResponse() - post_with_metadata.return_value = ( - reservation.SearchAllAssignmentsResponse(), - metadata, - ) + post.return_value = policy_pb2.Policy() + post_with_metadata.return_value = policy_pb2.Policy(), metadata - client.search_all_assignments( + client.set_iam_policy( request, metadata=[ ("key", "val"), @@ -18201,15 +23267,15 @@ def test_search_all_assignments_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_move_assignment_rest_bad_request( - request_type=reservation.MoveAssignmentRequest, +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" 
+ "resource": "projects/sample1/locations/sample2/reservations/sample3" } request = request_type(**request_init) @@ -18225,61 +23291,50 @@ def test_move_assignment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.move_assignment(request) + client.test_iam_permissions(request) @pytest.mark.parametrize( "request_type", [ - reservation.MoveAssignmentRequest, + iam_policy_pb2.TestIamPermissionsRequest, dict, ], ) -def test_move_assignment_rest_call_success(request_type): +def test_test_iam_permissions_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" + "resource": "projects/sample1/locations/sample2/reservations/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.Assignment( - name="name_value", - assignee="assignee_value", - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, + return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.move_assignment(request) + response = client.test_iam_permissions(request) # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Assignment) - assert response.name == "name_value" - assert response.assignee == "assignee_value" - assert response.job_type == reservation.Assignment.JobType.PIPELINE - assert response.state == reservation.Assignment.State.PENDING - assert response.enable_gemini_in_bigquery is True + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + assert response.permissions == ["permissions_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_move_assignment_rest_interceptors(null_interceptor): +def test_test_iam_permissions_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18293,19 +23348,17 @@ def test_move_assignment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_move_assignment" + transports.ReservationServiceRestInterceptor, 
"post_test_iam_permissions" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_move_assignment_with_metadata", + "post_test_iam_permissions_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_move_assignment" + transports.ReservationServiceRestInterceptor, "pre_test_iam_permissions" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.MoveAssignmentRequest.pb( - reservation.MoveAssignmentRequest() - ) + pb_message = iam_policy_pb2.TestIamPermissionsRequest() transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18316,19 +23369,24 @@ def test_move_assignment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Assignment.to_json(reservation.Assignment()) + return_value = json_format.MessageToJson( + iam_policy_pb2.TestIamPermissionsResponse() + ) req.return_value.content = return_value - request = reservation.MoveAssignmentRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.Assignment() - post_with_metadata.return_value = reservation.Assignment(), metadata + post.return_value = iam_policy_pb2.TestIamPermissionsResponse() + post_with_metadata.return_value = ( + iam_policy_pb2.TestIamPermissionsResponse(), + metadata, + ) - client.move_assignment( + client.test_iam_permissions( request, metadata=[ ("key", "val"), @@ -18341,18 +23399,14 @@ def test_move_assignment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_assignment_rest_bad_request( - request_type=reservation.UpdateAssignmentRequest, +def test_create_reservation_group_rest_bad_request( + 
request_type=reservation.CreateReservationGroupRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "assignment": { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18367,40 +23421,32 @@ def test_update_assignment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_assignment(request) + client.create_reservation_group(request) @pytest.mark.parametrize( "request_type", [ - reservation.UpdateAssignmentRequest, + reservation.CreateReservationGroupRequest, dict, ], ) -def test_update_assignment_rest_call_success(request_type): +def test_create_reservation_group_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "assignment": { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4" - } - } - request_init["assignment"] = { - "name": "projects/sample1/locations/sample2/reservations/sample3/assignments/sample4", - "assignee": "assignee_value", - "job_type": 1, - "state": 1, - "enable_gemini_in_bigquery": True, - } + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["reservation_group"] = {"name": "name_value"} # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = reservation.UpdateAssignmentRequest.meta.fields["assignment"] + test_field = reservation.CreateReservationGroupRequest.meta.fields[ + "reservation_group" + ] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18428,7 +23474,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["assignment"].items(): # pragma: NO COVER + for field, value in request_init["reservation_group"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18458,21 +23504,17 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["assignment"][field])): - del request_init["assignment"][field][i][subfield] + for i in range(0, len(request_init["reservation_group"][field])): + del request_init["reservation_group"][field][i][subfield] else: - del request_init["assignment"][field][subfield] + del request_init["reservation_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.Assignment( + return_value = reservation.ReservationGroup( name="name_value", - assignee="assignee_value", - job_type=reservation.Assignment.JobType.PIPELINE, - state=reservation.Assignment.State.PENDING, - enable_gemini_in_bigquery=True, ) # Wrap the value into a proper Response obj @@ -18480,24 +23522,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.Assignment.pb(return_value) + return_value = reservation.ReservationGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_assignment(request) + response = client.create_reservation_group(request) # Establish that the response is the type that we expect. - assert isinstance(response, reservation.Assignment) + assert isinstance(response, reservation.ReservationGroup) assert response.name == "name_value" - assert response.assignee == "assignee_value" - assert response.job_type == reservation.Assignment.JobType.PIPELINE - assert response.state == reservation.Assignment.State.PENDING - assert response.enable_gemini_in_bigquery is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_assignment_rest_interceptors(null_interceptor): +def test_create_reservation_group_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18511,18 +23549,18 @@ def test_update_assignment_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_update_assignment" + transports.ReservationServiceRestInterceptor, "post_create_reservation_group" ) as post, 
mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_update_assignment_with_metadata", + "post_create_reservation_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_update_assignment" + transports.ReservationServiceRestInterceptor, "pre_create_reservation_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.UpdateAssignmentRequest.pb( - reservation.UpdateAssignmentRequest() + pb_message = reservation.CreateReservationGroupRequest.pb( + reservation.CreateReservationGroupRequest() ) transcode.return_value = { "method": "post", @@ -18534,19 +23572,21 @@ def test_update_assignment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.Assignment.to_json(reservation.Assignment()) + return_value = reservation.ReservationGroup.to_json( + reservation.ReservationGroup() + ) req.return_value.content = return_value - request = reservation.UpdateAssignmentRequest() + request = reservation.CreateReservationGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.Assignment() - post_with_metadata.return_value = reservation.Assignment(), metadata + post.return_value = reservation.ReservationGroup() + post_with_metadata.return_value = reservation.ReservationGroup(), metadata - client.update_assignment( + client.create_reservation_group( request, metadata=[ ("key", "val"), @@ -18559,14 +23599,16 @@ def test_update_assignment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_bi_reservation_rest_bad_request( - request_type=reservation.GetBiReservationRequest, +def test_get_reservation_group_rest_bad_request( + 
request_type=reservation.GetReservationGroupRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/biReservation"} + request_init = { + "name": "projects/sample1/locations/sample2/reservationGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18581,31 +23623,32 @@ def test_get_bi_reservation_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_bi_reservation(request) + client.get_reservation_group(request) @pytest.mark.parametrize( "request_type", [ - reservation.GetBiReservationRequest, + reservation.GetReservationGroupRequest, dict, ], ) -def test_get_bi_reservation_rest_call_success(request_type): +def test_get_reservation_group_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/biReservation"} + request_init = { + "name": "projects/sample1/locations/sample2/reservationGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = reservation.BiReservation( + return_value = reservation.ReservationGroup( name="name_value", - size=443, ) # Wrap the value into a proper Response obj @@ -18613,21 +23656,20 @@ def test_get_bi_reservation_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) + return_value = reservation.ReservationGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_bi_reservation(request) + response = client.get_reservation_group(request) # Establish that the response is the type that we expect. - assert isinstance(response, reservation.BiReservation) + assert isinstance(response, reservation.ReservationGroup) assert response.name == "name_value" - assert response.size == 443 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_bi_reservation_rest_interceptors(null_interceptor): +def test_get_reservation_group_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18641,18 +23683,18 @@ def test_get_bi_reservation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_get_bi_reservation" + transports.ReservationServiceRestInterceptor, "post_get_reservation_group" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_get_bi_reservation_with_metadata", + "post_get_reservation_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_get_bi_reservation" + 
transports.ReservationServiceRestInterceptor, "pre_get_reservation_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.GetBiReservationRequest.pb( - reservation.GetBiReservationRequest() + pb_message = reservation.GetReservationGroupRequest.pb( + reservation.GetReservationGroupRequest() ) transcode.return_value = { "method": "post", @@ -18664,19 +23706,21 @@ def test_get_bi_reservation_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.BiReservation.to_json(reservation.BiReservation()) + return_value = reservation.ReservationGroup.to_json( + reservation.ReservationGroup() + ) req.return_value.content = return_value - request = reservation.GetBiReservationRequest() + request = reservation.GetReservationGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.BiReservation() - post_with_metadata.return_value = reservation.BiReservation(), metadata + post.return_value = reservation.ReservationGroup() + post_with_metadata.return_value = reservation.ReservationGroup(), metadata - client.get_bi_reservation( + client.get_reservation_group( request, metadata=[ ("key", "val"), @@ -18689,15 +23733,15 @@ def test_get_bi_reservation_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_bi_reservation_rest_bad_request( - request_type=reservation.UpdateBiReservationRequest, +def test_delete_reservation_group_rest_bad_request( + request_type=reservation.DeleteReservationGroupRequest, ): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "bi_reservation": {"name": 
"projects/sample1/locations/sample2/biReservation"} + "name": "projects/sample1/locations/sample2/reservationGroups/sample3" } request = request_type(**request_init) @@ -18713,112 +23757,141 @@ def test_update_bi_reservation_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_bi_reservation(request) + client.delete_reservation_group(request) @pytest.mark.parametrize( "request_type", [ - reservation.UpdateBiReservationRequest, + reservation.DeleteReservationGroupRequest, dict, ], ) -def test_update_bi_reservation_rest_call_success(request_type): +def test_delete_reservation_group_rest_call_success(request_type): client = ReservationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "bi_reservation": {"name": "projects/sample1/locations/sample2/biReservation"} - } - request_init["bi_reservation"] = { - "name": "projects/sample1/locations/sample2/biReservation", - "update_time": {"seconds": 751, "nanos": 543}, - "size": 443, - "preferred_tables": [ - { - "project_id": "project_id_value", - "dataset_id": "dataset_id_value", - "table_id": "table_id_value", - } - ], + "name": "projects/sample1/locations/sample2/reservationGroups/sample3" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = reservation.UpdateBiReservationRequest.meta.fields["bi_reservation"] + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_reservation_group(request) - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_reservation_group_rest_interceptors(null_interceptor): + transport = transports.ReservationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReservationServiceRestInterceptor(), + ) + client = ReservationServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReservationServiceRestInterceptor, "pre_delete_reservation_group" + ) as pre: + pre.assert_not_called() + pb_message = reservation.DeleteReservationGroupRequest.pb( + reservation.DeleteReservationGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = 
{"header-1": "value-1", "header-2": "value-2"} + + request = reservation.DeleteReservationGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_reservation_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] +def test_list_reservation_groups_rest_bad_request( + request_type=reservation.ListReservationGroupsRequest, +): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) - subfields_not_in_runtime = [] + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_reservation_groups(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["bi_reservation"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + reservation.ListReservationGroupsRequest, + dict, + ], +) +def test_list_reservation_groups_rest_call_success(request_type): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if 
field_repeated: - for i in range(0, len(request_init["bi_reservation"][field])): - del request_init["bi_reservation"][field][i][subfield] - else: - del request_init["bi_reservation"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = reservation.BiReservation( - name="name_value", - size=443, + return_value = reservation.ListReservationGroupsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -18826,21 +23899,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = reservation.BiReservation.pb(return_value) + return_value = reservation.ListReservationGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_bi_reservation(request) + response = client.list_reservation_groups(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, reservation.BiReservation) - assert response.name == "name_value" - assert response.size == 443 + assert isinstance(response, pagers.ListReservationGroupsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_bi_reservation_rest_interceptors(null_interceptor): +def test_list_reservation_groups_rest_interceptors(null_interceptor): transport = transports.ReservationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18854,18 +23926,18 @@ def test_update_bi_reservation_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ReservationServiceRestInterceptor, "post_update_bi_reservation" + transports.ReservationServiceRestInterceptor, "post_list_reservation_groups" ) as post, mock.patch.object( transports.ReservationServiceRestInterceptor, - "post_update_bi_reservation_with_metadata", + "post_list_reservation_groups_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ReservationServiceRestInterceptor, "pre_update_bi_reservation" + transports.ReservationServiceRestInterceptor, "pre_list_reservation_groups" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = reservation.UpdateBiReservationRequest.pb( - reservation.UpdateBiReservationRequest() + pb_message = reservation.ListReservationGroupsRequest.pb( + reservation.ListReservationGroupsRequest() ) transcode.return_value = { "method": "post", @@ -18877,19 +23949,24 @@ def test_update_bi_reservation_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = reservation.BiReservation.to_json(reservation.BiReservation()) + return_value = 
reservation.ListReservationGroupsResponse.to_json( + reservation.ListReservationGroupsResponse() + ) req.return_value.content = return_value - request = reservation.UpdateBiReservationRequest() + request = reservation.ListReservationGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = reservation.BiReservation() - post_with_metadata.return_value = reservation.BiReservation(), metadata + post.return_value = reservation.ListReservationGroupsResponse() + post_with_metadata.return_value = ( + reservation.ListReservationGroupsResponse(), + metadata, + ) - client.update_bi_reservation( + client.list_reservation_groups( request, metadata=[ ("key", "val"), @@ -19387,6 +24464,156 @@ def test_update_bi_reservation_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_rest(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.GetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.SetIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = iam_policy_pb2.TestIamPermissionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_reservation_group_empty_call_rest(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_reservation_group), "__call__" + ) as call: + client.create_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.CreateReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_reservation_group_empty_call_rest(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_reservation_group), "__call__" + ) as call: + client.get_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.GetReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_reservation_group_empty_call_rest(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_reservation_group), "__call__" + ) as call: + client.delete_reservation_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.DeleteReservationGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_reservation_groups_empty_call_rest(): + client = ReservationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_reservation_groups), "__call__" + ) as call: + client.list_reservation_groups(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = reservation.ListReservationGroupsRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = ReservationServiceClient( @@ -19442,6 +24669,13 @@ def test_reservation_service_base_transport(): "update_assignment", "get_bi_reservation", "update_bi_reservation", + "get_iam_policy", + "set_iam_policy", + "test_iam_permissions", + "create_reservation_group", + "get_reservation_group", + "delete_reservation_group", + "list_reservation_groups", ) for method in methods: with pytest.raises(NotImplementedError): @@ -19780,6 +25014,27 @@ def test_reservation_service_client_transport_session_collision(transport_name): session1 = client1.transport.update_bi_reservation._session session2 = client2.transport.update_bi_reservation._session assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.create_reservation_group._session + session2 = client2.transport.create_reservation_group._session + assert session1 != session2 + session1 = client1.transport.get_reservation_group._session + session2 = client2.transport.get_reservation_group._session + assert session1 != session2 + session1 = client1.transport.delete_reservation_group._session + session2 = client2.transport.delete_reservation_group._session + assert session1 != session2 + session1 = client1.transport.list_reservation_groups._session + session2 = client2.transport.list_reservation_groups._session + assert session1 != session2 def 
test_reservation_service_grpc_transport_channel(): @@ -20018,8 +25273,36 @@ def test_parse_reservation_path(): assert expected == actual +def test_reservation_group_path(): + project = "squid" + location = "clam" + reservation_group = "whelk" + expected = "projects/{project}/locations/{location}/reservationGroups/{reservation_group}".format( + project=project, + location=location, + reservation_group=reservation_group, + ) + actual = ReservationServiceClient.reservation_group_path( + project, location, reservation_group + ) + assert expected == actual + + +def test_parse_reservation_group_path(): + expected = { + "project": "octopus", + "location": "oyster", + "reservation_group": "nudibranch", + } + path = ReservationServiceClient.reservation_group_path(**expected) + + # Check that the path construction is reversible. + actual = ReservationServiceClient.parse_reservation_group_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "cuttlefish" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -20029,7 +25312,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "mussel", } path = ReservationServiceClient.common_billing_account_path(**expected) @@ -20039,7 +25322,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "winkle" expected = "folders/{folder}".format( folder=folder, ) @@ -20049,7 +25332,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nautilus", } path = ReservationServiceClient.common_folder_path(**expected) @@ -20059,7 +25342,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "scallop" expected = "organizations/{organization}".format( 
organization=organization, ) @@ -20069,7 +25352,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "abalone", } path = ReservationServiceClient.common_organization_path(**expected) @@ -20079,7 +25362,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "squid" expected = "projects/{project}".format( project=project, ) @@ -20089,7 +25372,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "clam", } path = ReservationServiceClient.common_project_path(**expected) @@ -20099,8 +25382,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "whelk" + location = "octopus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -20111,8 +25394,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "oyster", + "location": "nudibranch", } path = ReservationServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-compute-v1beta/.OwlBot.yaml b/packages/google-cloud-compute-v1beta/.OwlBot.yaml deleted file mode 100644 index f57571139e49..000000000000 --- a/packages/google-cloud-compute-v1beta/.OwlBot.yaml +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - - -deep-remove-regex: - - /owl-bot-staging - -deep-copy-regex: - - source: /google/cloud/compute/(v1beta)/compute-v.*-py - dest: /owl-bot-staging/google-cloud-compute-v1beta/$1 - -begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 -api-name: google-cloud-compute-v1beta diff --git a/packages/google-cloud-compute-v1beta/.repo-metadata.json b/packages/google-cloud-compute-v1beta/.repo-metadata.json index 3a427d12bde3..9e249a8eb834 100644 --- a/packages/google-cloud-compute-v1beta/.repo-metadata.json +++ b/packages/google-cloud-compute-v1beta/.repo-metadata.json @@ -13,4 +13,4 @@ "default_version": "v1beta", "api_shortname": "compute", "api_description": "delivers virtual machines running in Google's innovative data centers and worldwide fiber network. Compute Engine's tooling and workflow support enable scaling from single instances to global, load-balanced cloud computing. Compute Engine's VMs boot quickly, come with persistent disk storage, deliver consistent performance and are available in many configurations." -} +} \ No newline at end of file diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/projects/client.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/projects/client.py index 43ed73e2669f..561b1153b474 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/projects/client.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/projects/client.py @@ -2183,7 +2183,15 @@ def move_disk_unary( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> compute.Operation: - r"""Moves a persistent disk from one zone to another. + r"""Starting September 29, 2025, you can't use the moveDisk API on + new projects. 
To move a disk to a different region or zone, + follow the steps in `Change the location of a + disk `__. + Projects that already use the moveDisk API can continue usage + until September 29, 2026. Starting November 1, 2025, API + responses will include a warning message in the response body + about the upcoming deprecation. You can skip the message to + continue using the service without interruption. .. code-block:: python @@ -2298,7 +2306,15 @@ def move_disk( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> extended_operation.ExtendedOperation: - r"""Moves a persistent disk from one zone to another. + r"""Starting September 29, 2025, you can't use the moveDisk API on + new projects. To move a disk to a different region or zone, + follow the steps in `Change the location of a + disk `__. + Projects that already use the moveDisk API can continue usage + until September 29, 2026. Starting November 1, 2025, API + responses will include a warning message in the response body + about the upcoming deprecation. You can skip the message to + continue using the service without interruption. .. code-block:: python diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py index 94e587330a88..06abc481cc2d 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py @@ -61821,6 +61821,11 @@ class InterconnectLocation(proto.Message): [Output Only] Server-defined URL for the resource. This field is a member of `oneof`_ ``_self_link``. + single_region_production_critical_peer_locations (MutableSequence[str]): + [Output Only] URLs of the other locations that can pair up + with this location to support Single-Region 99.99% SLA. E.g. 
+ iad-zone1-1 and iad-zone2-5467 are Single-Region 99.99% peer + locations of each other. status (str): [Output Only] The status of this InterconnectLocation, which can take one of the following values: - CLOSED: The @@ -62023,6 +62028,12 @@ class Status(proto.Enum): number=456214797, optional=True, ) + single_region_production_critical_peer_locations: MutableSequence[ + str + ] = proto.RepeatedField( + proto.STRING, + number=439537103, + ) status: str = proto.Field( proto.STRING, number=181260274, @@ -93386,6 +93397,11 @@ class PatchOrganizationSecurityPolicyRequest(proto.Message): Name of the security policy to update. security_policy_resource (google.cloud.compute_v1beta.types.SecurityPolicy): The body resource for this request + update_mask (str): + Indicates fields to be cleared as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. """ request_id: str = proto.Field( @@ -93402,6 +93418,11 @@ class PatchOrganizationSecurityPolicyRequest(proto.Message): number=216159612, message="SecurityPolicy", ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) class PatchPacketMirroringRequest(proto.Message): @@ -94968,6 +94989,11 @@ class PatchRuleOrganizationSecurityPolicyRequest(proto.Message): Name of the security policy to update. security_policy_rule_resource (google.cloud.compute_v1beta.types.SecurityPolicyRule): The body resource for this request + update_mask (str): + Indicates fields to be cleared as part of + this request. + + This field is a member of `oneof`_ ``_update_mask``. 
""" priority: int = proto.Field( @@ -94989,6 +95015,11 @@ class PatchRuleOrganizationSecurityPolicyRequest(proto.Message): number=402693443, message="SecurityPolicyRule", ) + update_mask: str = proto.Field( + proto.STRING, + number=500079778, + optional=True, + ) class PatchRuleRegionNetworkFirewallPolicyRequest(proto.Message): diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_interconnect_locations.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_interconnect_locations.py index 31321b11a4d1..3a15f9549caf 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_interconnect_locations.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_interconnect_locations.py @@ -1579,6 +1579,9 @@ def test_get_rest_call_success(request_type): name="name_value", peeringdb_facility_id="peeringdb_facility_id_value", self_link="self_link_value", + single_region_production_critical_peer_locations=[ + "single_region_production_critical_peer_locations_value" + ], status="status_value", supports_pzs=True, ) @@ -1614,6 +1617,9 @@ def test_get_rest_call_success(request_type): assert response.name == "name_value" assert response.peeringdb_facility_id == "peeringdb_facility_id_value" assert response.self_link == "self_link_value" + assert response.single_region_production_critical_peer_locations == [ + "single_region_production_critical_peer_locations_value" + ] assert response.status == "status_value" assert response.supports_pzs is True diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_organization_security_policies.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_organization_security_policies.py index 3a146c4ceb61..ee9fb7cbd296 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_organization_security_policies.py +++ 
b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_organization_security_policies.py @@ -4180,7 +4180,12 @@ def test_patch_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).patch._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4237,7 +4242,12 @@ def test_patch_rest_unset_required_fields(): unset_fields = transport.patch._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set( + ( + "requestId", + "updateMask", + ) + ) & set( ( "securityPolicy", @@ -4387,7 +4397,12 @@ def test_patch_unary_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).patch._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("request_id",)) + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4444,7 +4459,12 @@ def test_patch_unary_rest_unset_required_fields(): unset_fields = transport.patch._get_unset_required_fields({}) assert set(unset_fields) == ( - set(("requestId",)) + set( + ( + "requestId", + "updateMask", + ) + ) & set( ( "securityPolicy", @@ -4598,6 +4618,7 @@ def test_patch_rule_rest_required_fields( ( "priority", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) @@ -4660,6 +4681,7 @@ def test_patch_rule_rest_unset_required_fields(): ( "priority", "requestId", + "updateMask", ) ) & set( @@ -4807,6 +4829,7 @@ def test_patch_rule_unary_rest_required_fields( ( "priority", "request_id", + "update_mask", ) ) jsonified_request.update(unset_fields) @@ -4869,6 +4892,7 @@ def test_patch_rule_unary_rest_unset_required_fields(): ( "priority", "requestId", + "updateMask", ) ) & set( diff --git a/packages/google-cloud-compute/.OwlBot.yaml b/packages/google-cloud-compute/.OwlBot.yaml deleted file mode 100644 index 13a7bd8f17d0..000000000000 --- a/packages/google-cloud-compute/.OwlBot.yaml +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -deep-remove-regex: - - /owl-bot-staging - -deep-copy-regex: - - source: /google/cloud/compute/(v1)/compute-v.*-py - dest: /owl-bot-staging/google-cloud-compute/$1 - -begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 -api-name: google-cloud-compute diff --git a/packages/google-cloud-compute/google/cloud/compute/__init__.py b/packages/google-cloud-compute/google/cloud/compute/__init__.py index 1667f83c457e..aef642c6c51c 100644 --- a/packages/google-cloud-compute/google/cloud/compute/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute/__init__.py @@ -1751,6 +1751,9 @@ SubnetworksScopedList, SubnetworksScopedWarning, SubnetworksSetPrivateIpGoogleAccessRequest, + SubnetworkUtilizationDetails, + SubnetworkUtilizationDetailsIPV4Utilization, + SubnetworkUtilizationDetailsIPV6Utilization, Subsetting, SuspendInstanceRequest, SuspendInstancesInstanceGroupManagerRequest, @@ -3526,6 +3529,9 @@ "SubnetworksScopedList", "SubnetworksScopedWarning", "SubnetworksSetPrivateIpGoogleAccessRequest", + "SubnetworkUtilizationDetails", + "SubnetworkUtilizationDetailsIPV4Utilization", + "SubnetworkUtilizationDetailsIPV6Utilization", "Subsetting", "SuspendInstanceRequest", "SuspendInstancesInstanceGroupManagerRequest", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py index 356c8e33d716..f65cf8b7b202 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py @@ -1625,6 +1625,9 @@ SubnetworksScopedList, SubnetworksScopedWarning, SubnetworksSetPrivateIpGoogleAccessRequest, + SubnetworkUtilizationDetails, + SubnetworkUtilizationDetailsIPV4Utilization, + SubnetworkUtilizationDetailsIPV6Utilization, Subsetting, SuspendInstanceRequest, SuspendInstancesInstanceGroupManagerRequest, @@ -3382,6 +3385,9 @@ "SubnetworkLogConfig", "SubnetworkParams", "SubnetworkSecondaryRange", + 
"SubnetworkUtilizationDetails", + "SubnetworkUtilizationDetailsIPV4Utilization", + "SubnetworkUtilizationDetailsIPV6Utilization", "SubnetworksClient", "SubnetworksExpandIpCidrRangeRequest", "SubnetworksScopedList", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py index 47f333c721a9..efd45a36c87d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/projects/client.py @@ -2183,7 +2183,15 @@ def move_disk_unary( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> compute.Operation: - r"""Moves a persistent disk from one zone to another. + r"""Starting September 29, 2025, you can't use the moveDisk API on + new projects. To move a disk to a different region or zone, + follow the steps in `Change the location of a + disk `__. + Projects that already use the moveDisk API can continue usage + until September 29, 2026. Starting November 1, 2025, API + responses will include a warning message in the response body + about the upcoming deprecation. You can skip the message to + continue using the service without interruption. .. code-block:: python @@ -2298,7 +2306,15 @@ def move_disk( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> extended_operation.ExtendedOperation: - r"""Moves a persistent disk from one zone to another. + r"""Starting September 29, 2025, you can't use the moveDisk API on + new projects. To move a disk to a different region or zone, + follow the steps in `Change the location of a + disk `__. + Projects that already use the moveDisk API can continue usage + until September 29, 2026. 
Starting November 1, 2025, API + responses will include a warning message in the response body + about the upcoming deprecation. You can skip the message to + continue using the service without interruption. .. code-block:: python diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py index 0a211eb44f15..1fdff31b4056 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py @@ -1511,6 +1511,9 @@ SubnetworksScopedList, SubnetworksScopedWarning, SubnetworksSetPrivateIpGoogleAccessRequest, + SubnetworkUtilizationDetails, + SubnetworkUtilizationDetailsIPV4Utilization, + SubnetworkUtilizationDetailsIPV6Utilization, Subsetting, SuspendInstanceRequest, SuspendInstancesInstanceGroupManagerRequest, @@ -3183,6 +3186,9 @@ "SubnetworksScopedList", "SubnetworksScopedWarning", "SubnetworksSetPrivateIpGoogleAccessRequest", + "SubnetworkUtilizationDetails", + "SubnetworkUtilizationDetailsIPV4Utilization", + "SubnetworkUtilizationDetailsIPV6Utilization", "Subsetting", "SuspendInstanceRequest", "SuspendInstancesInstanceGroupManagerRequest", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py index 75954a61316c..5de12110557a 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py @@ -1516,6 +1516,9 @@ "SubnetworkLogConfig", "SubnetworkParams", "SubnetworkSecondaryRange", + "SubnetworkUtilizationDetails", + "SubnetworkUtilizationDetailsIPV4Utilization", + "SubnetworkUtilizationDetailsIPV6Utilization", "SubnetworksExpandIpCidrRangeRequest", "SubnetworksScopedList", "SubnetworksScopedWarning", @@ -36251,6 +36254,9 @@ class GetSubnetworkRequest(proto.Message): r"""A request 
message for Subnetworks.Get. See the method description for details. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: project (str): Project ID for this request. @@ -36258,8 +36264,33 @@ class GetSubnetworkRequest(proto.Message): Name of the region scoping this request. subnetwork (str): Name of the Subnetwork resource to return. + views (str): + Defines the extra views returned back in the subnetwork + resource. Supported values: - WITH_UTILIZATION: Utilization + data is included in the response. Check the Views enum for + the list of possible values. + + This field is a member of `oneof`_ ``_views``. """ + class Views(proto.Enum): + r"""Defines the extra views returned back in the subnetwork resource. + Supported values: - WITH_UTILIZATION: Utilization data is included + in the response. + + Values: + UNDEFINED_VIEWS (0): + A value indicating that the enum field is not + set. + DEFAULT (115302945): + No description available. + WITH_UTILIZATION (504090633): + Utilization data is included in the response. + """ + UNDEFINED_VIEWS = 0 + DEFAULT = 115302945 + WITH_UTILIZATION = 504090633 + project: str = proto.Field( proto.STRING, number=227560217, @@ -36272,6 +36303,11 @@ class GetSubnetworkRequest(proto.Message): proto.STRING, number=307827694, ) + views: str = proto.Field( + proto.STRING, + number=112204398, + optional=True, + ) class GetTargetGrpcProxyRequest(proto.Message): @@ -55309,6 +55345,11 @@ class InterconnectLocation(proto.Message): [Output Only] Server-defined URL for the resource. This field is a member of `oneof`_ ``_self_link``. + single_region_production_critical_peer_locations (MutableSequence[str]): + [Output Only] URLs of the other locations that can pair up + with this location to support Single-Region 99.99% SLA. E.g. + iad-zone1-1 and iad-zone2-5467 are Single-Region 99.99% peer + locations of each other. 
status (str): [Output Only] The status of this InterconnectLocation, which can take one of the following values: - CLOSED: The @@ -55501,6 +55542,12 @@ class Status(proto.Enum): number=456214797, optional=True, ) + single_region_production_critical_peer_locations: MutableSequence[ + str + ] = proto.RepeatedField( + proto.STRING, + number=439537103, + ) status: str = proto.Field( proto.STRING, number=181260274, @@ -56113,6 +56160,13 @@ class InterconnectRemoteLocation(proto.Message): requestedLinkCount cannot exceed max_lag_size_10_gbps. This field is a member of `oneof`_ ``_max_lag_size10_gbps``. + max_lag_size400_gbps (int): + [Output Only] The maximum number of 400 Gbps ports supported + in a link aggregation group (LAG). When linkType is 400 + Gbps, requestedLinkCount cannot exceed + max_lag_size_400_gbps. + + This field is a member of `oneof`_ ``_max_lag_size400_gbps``. name (str): [Output Only] Name of the resource. @@ -56290,6 +56344,11 @@ class Status(proto.Enum): number=294007573, optional=True, ) + max_lag_size400_gbps: int = proto.Field( + proto.INT32, + number=104941138, + optional=True, + ) name: str = proto.Field( proto.STRING, number=3373707, @@ -70835,8 +70894,33 @@ class ListSubnetworksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + views (str): + Defines the extra views returned back in the subnetwork + resource. Supported values: - WITH_UTILIZATION: Utilization + data is included in the response. Check the Views enum for + the list of possible values. + + This field is a member of `oneof`_ ``_views``. """ + class Views(proto.Enum): + r"""Defines the extra views returned back in the subnetwork resource. + Supported values: - WITH_UTILIZATION: Utilization data is included + in the response. + + Values: + UNDEFINED_VIEWS (0): + A value indicating that the enum field is not + set. + DEFAULT (115302945): + No description available. 
+ WITH_UTILIZATION (504090633): + Utilization data is included in the response. + """ + UNDEFINED_VIEWS = 0 + DEFAULT = 115302945 + WITH_UTILIZATION = 504090633 + filter: str = proto.Field( proto.STRING, number=336120696, @@ -70870,6 +70954,11 @@ class ListSubnetworksRequest(proto.Message): number=517198390, optional=True, ) + views: str = proto.Field( + proto.STRING, + number=112204398, + optional=True, + ) class ListTargetGrpcProxiesRequest(proto.Message): @@ -92428,6 +92517,11 @@ class RequestMirrorPolicy(proto.Message): supported as a mirrored backend service. This field is a member of `oneof`_ ``_backend_service``. + mirror_percent (float): + The percentage of requests to be mirrored to + ``backend_service``. + + This field is a member of `oneof`_ ``_mirror_percent``. """ backend_service: str = proto.Field( @@ -92435,6 +92529,11 @@ class RequestMirrorPolicy(proto.Message): number=306946058, optional=True, ) + mirror_percent: float = proto.Field( + proto.DOUBLE, + number=277432261, + optional=True, + ) class RequestRemovePeeringNetworkRequest(proto.Message): @@ -112676,6 +112775,12 @@ class Subnetwork(proto.Message): Output only. [Output Only] The array of internal IPv6 network ranges reserved from the subnetwork's internal IPv6 range for system use. + utilization_details (google.cloud.compute_v1.types.SubnetworkUtilizationDetails): + Output only. [Output Only] The current IP utilization of all + subnetwork ranges. Contains the total number of allocated + and free IPs in each range. + + This field is a member of `oneof`_ ``_utilization_details``. 
""" class Ipv6AccessType(proto.Enum): @@ -113019,6 +113124,12 @@ class State(proto.Enum): proto.STRING, number=432294995, ) + utilization_details: "SubnetworkUtilizationDetails" = proto.Field( + proto.MESSAGE, + number=125404453, + optional=True, + message="SubnetworkUtilizationDetails", + ) class SubnetworkAggregatedList(proto.Message): @@ -113389,6 +113500,127 @@ class SubnetworkSecondaryRange(proto.Message): ) +class SubnetworkUtilizationDetails(proto.Message): + r"""The current IP utilization of all subnetwork ranges. Contains + the total number of allocated and free IPs in each range. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + external_ipv6_instance_utilization (google.cloud.compute_v1.types.SubnetworkUtilizationDetailsIPV6Utilization): + Utilizations of external IPV6 IP range. + + This field is a member of `oneof`_ ``_external_ipv6_instance_utilization``. + external_ipv6_lb_utilization (google.cloud.compute_v1.types.SubnetworkUtilizationDetailsIPV6Utilization): + Utilizations of external IPV6 IP range for + NetLB. + + This field is a member of `oneof`_ ``_external_ipv6_lb_utilization``. + internal_ipv6_utilization (google.cloud.compute_v1.types.SubnetworkUtilizationDetailsIPV6Utilization): + Utilizations of internal IPV6 IP range. + + This field is a member of `oneof`_ ``_internal_ipv6_utilization``. + ipv4_utilizations (MutableSequence[google.cloud.compute_v1.types.SubnetworkUtilizationDetailsIPV4Utilization]): + Utilizations of all IPV4 IP ranges. For + primary ranges, the range name will be empty. 
+ """ + + external_ipv6_instance_utilization: "SubnetworkUtilizationDetailsIPV6Utilization" = proto.Field( + proto.MESSAGE, + number=419750236, + optional=True, + message="SubnetworkUtilizationDetailsIPV6Utilization", + ) + external_ipv6_lb_utilization: "SubnetworkUtilizationDetailsIPV6Utilization" = ( + proto.Field( + proto.MESSAGE, + number=136563645, + optional=True, + message="SubnetworkUtilizationDetailsIPV6Utilization", + ) + ) + internal_ipv6_utilization: "SubnetworkUtilizationDetailsIPV6Utilization" = ( + proto.Field( + proto.MESSAGE, + number=69707020, + optional=True, + message="SubnetworkUtilizationDetailsIPV6Utilization", + ) + ) + ipv4_utilizations: MutableSequence[ + "SubnetworkUtilizationDetailsIPV4Utilization" + ] = proto.RepeatedField( + proto.MESSAGE, + number=206180011, + message="SubnetworkUtilizationDetailsIPV4Utilization", + ) + + +class SubnetworkUtilizationDetailsIPV4Utilization(proto.Message): + r"""The IPV4 utilization of a single IP range. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + range_name (str): + Will be set for secondary range. Empty for + primary IPv4 range. + + This field is a member of `oneof`_ ``_range_name``. + total_allocated_ip (int): + + This field is a member of `oneof`_ ``_total_allocated_ip``. + total_free_ip (int): + + This field is a member of `oneof`_ ``_total_free_ip``. + """ + + range_name: str = proto.Field( + proto.STRING, + number=332216397, + optional=True, + ) + total_allocated_ip: int = proto.Field( + proto.INT64, + number=279055546, + optional=True, + ) + total_free_ip: int = proto.Field( + proto.INT64, + number=105624031, + optional=True, + ) + + +class SubnetworkUtilizationDetailsIPV6Utilization(proto.Message): + r"""The IPV6 utilization of a single IP range. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + total_allocated_ip (google.cloud.compute_v1.types.Uint128): + + This field is a member of `oneof`_ ``_total_allocated_ip``. + total_free_ip (google.cloud.compute_v1.types.Uint128): + + This field is a member of `oneof`_ ``_total_free_ip``. + """ + + total_allocated_ip: "Uint128" = proto.Field( + proto.MESSAGE, + number=279055546, + optional=True, + message="Uint128", + ) + total_free_ip: "Uint128" = proto.Field( + proto.MESSAGE, + number=105624031, + optional=True, + message="Uint128", + ) + + class SubnetworksExpandIpCidrRangeRequest(proto.Message): r""" diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py index 104c4599df8d..380b5a75dd6f 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -1579,6 +1579,9 @@ def test_get_rest_call_success(request_type): name="name_value", peeringdb_facility_id="peeringdb_facility_id_value", self_link="self_link_value", + single_region_production_critical_peer_locations=[ + "single_region_production_critical_peer_locations_value" + ], status="status_value", supports_pzs=True, ) @@ -1614,6 +1617,9 @@ def test_get_rest_call_success(request_type): assert response.name == "name_value" assert response.peeringdb_facility_id == "peeringdb_facility_id_value" assert response.self_link == "self_link_value" + assert response.single_region_production_critical_peer_locations == [ + "single_region_production_critical_peer_locations_value" + ] assert response.status == "status_value" assert response.supports_pzs is True diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py index 1bb860d2be0c..2df75ffffe6d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_interconnect_remote_locations.py @@ -1606,6 +1606,7 @@ def test_get_rest_call_success(request_type): lacp="lacp_value", max_lag_size100_gbps=1935, max_lag_size10_gbps=1887, + max_lag_size400_gbps=1938, name="name_value", peeringdb_facility_id="peeringdb_facility_id_value", remote_service="remote_service_value", @@ -1641,6 +1642,7 @@ def test_get_rest_call_success(request_type): assert response.lacp == "lacp_value" assert response.max_lag_size100_gbps == 1935 assert response.max_lag_size10_gbps == 1887 + assert response.max_lag_size400_gbps == 1938 assert response.name == "name_value" assert response.peeringdb_facility_id == "peeringdb_facility_id_value" assert response.remote_service == "remote_service_value" diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py index cc717f2df094..dd056047b577 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -3781,7 +3781,10 @@ def test_insert_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, + "request_mirror_policy": { + "backend_service": "backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, @@ -4333,7 +4336,10 @@ def test_patch_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, + "request_mirror_policy": { + "backend_service": 
"backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, @@ -4754,7 +4760,10 @@ def test_update_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, + "request_mirror_policy": { + "backend_service": "backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, @@ -5179,7 +5188,10 @@ def test_validate_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, + "request_mirror_policy": { + "backend_service": "backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py index 9d236de43da2..9b4be450fc49 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -2130,6 +2130,8 @@ def test_get_rest_required_fields(request_type=compute.GetSubnetworkRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() ).get._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("views",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2189,7 +2191,7 @@ def test_get_rest_unset_required_fields(): unset_fields = transport.get._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set(("views",)) & set( ( "project", @@ -2952,6 +2954,7 @@ def test_list_rest_required_fields(request_type=compute.ListSubnetworksRequest): "order_by", "page_token", "return_partial_success", + "views", ) ) jsonified_request.update(unset_fields) @@ -3018,6 +3021,7 @@ def test_list_rest_unset_required_fields(): "orderBy", "pageToken", "returnPartialSuccess", + "views", ) ) & set( @@ -5777,6 +5781,21 @@ def test_insert_rest_call_success(request_type): "system_reserved_internal_ipv6_ranges_value1", "system_reserved_internal_ipv6_ranges_value2", ], + "utilization_details": { + "external_ipv6_instance_utilization": { + "total_allocated_ip": {"high": 416, "low": 338}, + "total_free_ip": {}, + }, + "external_ipv6_lb_utilization": {}, + "internal_ipv6_utilization": {}, + "ipv4_utilizations": [ + { + "range_name": "range_name_value", + "total_allocated_ip": 1892, + "total_free_ip": 1373, + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -6331,6 +6350,21 @@ def test_patch_rest_call_success(request_type): "system_reserved_internal_ipv6_ranges_value1", "system_reserved_internal_ipv6_ranges_value2", ], + "utilization_details": { + "external_ipv6_instance_utilization": { + "total_allocated_ip": {"high": 416, "low": 338}, + "total_free_ip": {}, + }, + "external_ipv6_lb_utilization": {}, + "internal_ipv6_utilization": {}, + "ipv4_utilizations": [ + { + "range_name": "range_name_value", + "total_allocated_ip": 1892, + "total_free_ip": 1373, + } + ], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py index f28823e0191f..3be0afcf503a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py @@ -4434,7 +4434,10 @@ def test_insert_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, + "request_mirror_policy": { + "backend_service": "backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, @@ -5225,7 +5228,10 @@ def test_patch_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, + "request_mirror_policy": { + "backend_service": "backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, @@ -5642,7 +5648,10 @@ def test_update_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": 
"backend_service_value"}, + "request_mirror_policy": { + "backend_service": "backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, @@ -6067,7 +6076,10 @@ def test_validate_rest_call_success(request_type): }, }, "max_stream_duration": {}, - "request_mirror_policy": {"backend_service": "backend_service_value"}, + "request_mirror_policy": { + "backend_service": "backend_service_value", + "mirror_percent": 0.1515, + }, "retry_policy": { "num_retries": 1197, "per_try_timeout": {}, diff --git a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/types/cloud_event.py b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/types/cloud_event.py index 0188a7b62e30..ce7b1df133ef 100644 --- a/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/types/cloud_event.py +++ b/packages/google-cloud-eventarc-publishing/google/cloud/eventarc_publishing_v1/types/cloud_event.py @@ -69,6 +69,13 @@ class CloudEvent(proto.Message): proto_data (google.protobuf.any_pb2.Any): Optional. Proto data. + NOTE: The ``protoData`` field only functions as expected + when the payload is specifically a ``CloudEvent`` message + type, and can't be used for arbitrary protocol buffer + messages. For any other protocol buffer type, you must + serialize your proto message into bytes, and use the + ``binaryData`` field instead. + This field is a member of `oneof`_ ``data``. """ diff --git a/packages/google-maps-places/samples/README.rst b/packages/google-maps-places/samples/README.rst deleted file mode 100644 index 35130da03a79..000000000000 --- a/packages/google-maps-places/samples/README.rst +++ /dev/null @@ -1,12 +0,0 @@ -Samples -======= - -See `samples`_ for the :code:`google-maps-places` library. - -.. 
_samples: https://developers.google.com/maps/documentation/places/web-service/client-library-examples - - -**Important**: When making API calls, you must include the :code:`x-goog-fieldmask` key within the :code:`metadata` argument. This key needs a `Field Mask`_. For an example of this, check out the sample code for `Place Details`_, which shows how to set the `Field Mask`_. - -.. _Place Details: https://developers.google.com/maps/documentation/places/web-service/client-library-examples#python_1 -.. _Field Mask: https://developers.google.com/maps/documentation/places/web-service/place-details#fieldmask diff --git a/packages/google-shopping-css/.OwlBot.yaml b/packages/google-shopping-css/.OwlBot.yaml deleted file mode 100644 index 03e3b9520666..000000000000 --- a/packages/google-shopping-css/.OwlBot.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -deep-copy-regex: - - source: /google/shopping/css/(v.*)/.*-py - dest: /owl-bot-staging/google-shopping-css/$1 -api-name: google-shopping-css diff --git a/packages/google-shopping-css/google/shopping/css/gapic_version.py b/packages/google-shopping-css/google/shopping/css/gapic_version.py index 20a9cd975b02..f4e0e4f13cc0 100644 --- a/packages/google-shopping-css/google/shopping/css/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.17" # {x-release-please-version} diff --git a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py index 20a9cd975b02..f4e0e4f13cc0 100644 --- a/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py +++ b/packages/google-shopping-css/google/shopping/css_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "0.1.17" # {x-release-please-version} diff --git a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json index c620eea1d5f5..e4a44f19bca5 100644 --- a/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json +++ b/packages/google-shopping-css/samples/generated_samples/snippet_metadata_google.shopping.css.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-shopping-css", - "version": "0.1.0" + "version": "0.1.17" }, "snippets": [ { diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products/__init__.py index 9a575ce7fc63..656fb4921c9c 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products/__init__.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products/__init__.py @@ -46,6 +46,7 @@ AgeGroup, AutomatedDiscounts, Availability, + CarrierTransitTimeOption, CertificationAuthority, CertificationName, CloudExportAdditionalProperties, @@ -117,6 +118,7 @@ "UnitPricingMeasure", "AgeGroup", "Availability", + "CarrierTransitTimeOption", "CertificationAuthority", "CertificationName", "Condition", diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/__init__.py index 1b367c14b8b3..008f766322a6 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/__init__.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/__init__.py @@ -39,6 +39,7 @@ AgeGroup, AutomatedDiscounts, Availability, + CarrierTransitTimeOption, 
CertificationAuthority, CertificationName, CloudExportAdditionalProperties, @@ -80,6 +81,7 @@ "AgeGroup", "AutomatedDiscounts", "Availability", + "CarrierTransitTimeOption", "CertificationAuthority", "CertificationName", "CloudExportAdditionalProperties", diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/__init__.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/__init__.py index 0c8a62be54d8..34a92686f899 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/__init__.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/__init__.py @@ -29,6 +29,7 @@ AgeGroup, AutomatedDiscounts, Availability, + CarrierTransitTimeOption, CertificationAuthority, CertificationName, CloudExportAdditionalProperties, @@ -96,6 +97,7 @@ "UnitPricingMeasure", "AgeGroup", "Availability", + "CarrierTransitTimeOption", "CertificationAuthority", "CertificationName", "Condition", diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py index 3a7d9670607a..fb9c54bbf934 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py @@ -40,6 +40,7 @@ "CertificationAuthority", "CertificationName", "DigitalSourceType", + "CarrierTransitTimeOption", "ProductAttributes", "ShippingWeight", "ShippingDimension", @@ -483,6 +484,135 @@ class DigitalSourceType(proto.Enum): DEFAULT = 2 +class CarrierTransitTimeOption(proto.Enum): + r"""Possible carrier where transit time is coming from. + + Values: + CARRIER_TRANSIT_TIME_OPTION_UNSPECIFIED (0): + Carrier transit time option is unspecified. 
+ DHL_PAKET (1): + DHL Paket shipping service. + DHL_PACKCHEN (2): + DHL Packchen shipping service. + DHL_EXPRESSEASY (3): + DHL Express Easy shipping service. + DPD_EXPRESS (4): + DPD Express shipping service. + DPD_CLASSIC_PARCEL (5): + DPD Classic Parcel shipping service. + HERMES_HAUSTUR (6): + Hermes Haustur shipping service. + HERMES_PAKETSHOP (7): + Hermes Paketshop shipping service. + GLS_BUSINESS (8): + GLS Business shipping service. + GLS_EXPRESS (9): + GLS Express shipping service. + GLS_PRIVATE (10): + GLS Private shipping service. + COLISSIMO_DOMICILE (11): + Colissimo Domicile shipping service. + DHL_EXPRESS_12AM (12): + DHL Express 12 AM shipping service. + DHL_EXPRESS_9AM (13): + DHL Express 9 AM shipping service. + GEODIS_EXPRESS (14): + GEODIS Express shipping service. + GEODIS_PACK_30 (15): + GEODIS Pack 30 shipping service. + GEODIS_SAME_DAY (16): + GEODIS Same Day shipping service. + GEODIS_TOP_24 (17): + GEODIS Top 24 shipping service. + TNT_ESSENTIEL_24H (18): + TNT Essentiel 24H shipping service. + TNT_ESSENTIEL_FLEXIBILITE (19): + TNT Essentiel Flexibilite shipping service. + FEDEX_GROUND (20): + FedEx Ground shipping service. + FEDEX_HOME_DELIVERY (21): + FedEx Home Delivery shipping service. + FEDEX_EXPRESS_SAVER (22): + FedEx Express Saver shipping service. + FEDEX_FIRST_OVERNIGHT (23): + FedEx First Overnight shipping service. + FEDEX_PRIORITY_OVERNIGHT (24): + FedEx Priority Overnight shipping service. + FEDEX_STANDARD_OVERNIGHT (25): + FedEx Standard Overnight shipping service. + FEDEX_2DAY (26): + FedEx 2Day shipping service. + UPS_2ND_DAY_AIR (27): + UPS 2nd Day Air shipping service. + UPS_2ND_DAY_AM (28): + UPS 2nd Day AM shipping service. + UPS_3_DAY_SELECT (29): + UPS 3 Day Select shipping service. + UPS_GROUND (30): + UPS Ground shipping service. + UPS_NEXT_DAY_AIR (31): + UPS Next Day Air shipping service. + UPS_NEXT_DAY_AIR_EARLY_AM (32): + UPS Next Day Air Early AM shipping service. 
+ UPS_NEXT_DAY_AIR_SAVER (33): + UPS Next Day Air Saver shipping service. + USPS_PRIORITY_MAIL_EXPRESS (34): + USPS Priority Mail Express shipping service. + USPS_MEDIA_MAIL (35): + USPS Media Mail shipping service. + USPS_GROUND_ADVANTAGE_RETAIL (36): + USPS Ground Advantage Retail shipping + service. + USPS_PRIORITY_MAIL (37): + USPS Priority Mail shipping service. + USPS_GROUND_ADVANTAGE_COMMERCIAL (38): + USPS Ground Advantage Commercial shipping + service. + USPS_FIRST_CLASS_MAIL (39): + USPS First Class Mail shipping service. + """ + CARRIER_TRANSIT_TIME_OPTION_UNSPECIFIED = 0 + DHL_PAKET = 1 + DHL_PACKCHEN = 2 + DHL_EXPRESSEASY = 3 + DPD_EXPRESS = 4 + DPD_CLASSIC_PARCEL = 5 + HERMES_HAUSTUR = 6 + HERMES_PAKETSHOP = 7 + GLS_BUSINESS = 8 + GLS_EXPRESS = 9 + GLS_PRIVATE = 10 + COLISSIMO_DOMICILE = 11 + DHL_EXPRESS_12AM = 12 + DHL_EXPRESS_9AM = 13 + GEODIS_EXPRESS = 14 + GEODIS_PACK_30 = 15 + GEODIS_SAME_DAY = 16 + GEODIS_TOP_24 = 17 + TNT_ESSENTIEL_24H = 18 + TNT_ESSENTIEL_FLEXIBILITE = 19 + FEDEX_GROUND = 20 + FEDEX_HOME_DELIVERY = 21 + FEDEX_EXPRESS_SAVER = 22 + FEDEX_FIRST_OVERNIGHT = 23 + FEDEX_PRIORITY_OVERNIGHT = 24 + FEDEX_STANDARD_OVERNIGHT = 25 + FEDEX_2DAY = 26 + UPS_2ND_DAY_AIR = 27 + UPS_2ND_DAY_AM = 28 + UPS_3_DAY_SELECT = 29 + UPS_GROUND = 30 + UPS_NEXT_DAY_AIR = 31 + UPS_NEXT_DAY_AIR_EARLY_AM = 32 + UPS_NEXT_DAY_AIR_SAVER = 33 + USPS_PRIORITY_MAIL_EXPRESS = 34 + USPS_MEDIA_MAIL = 35 + USPS_GROUND_ADVANTAGE_RETAIL = 36 + USPS_PRIORITY_MAIL = 37 + USPS_GROUND_ADVANTAGE_COMMERCIAL = 38 + USPS_FIRST_CLASS_MAIL = 39 + + class ProductAttributes(proto.Message): r"""Product attributes. @@ -677,6 +807,8 @@ class ProductAttributes(proto.Message): (exclusive) and 2000 (inclusive). shipping (MutableSequence[google.shopping.merchant_products_v1.types.Shipping]): Shipping rules. + carrier_shipping (MutableSequence[google.shopping.merchant_products_v1.types.ProductAttributes.CarrierShipping]): + Rules for carrier-based shipping. 
free_shipping_threshold (MutableSequence[google.shopping.merchant_products_v1.types.FreeShippingThreshold]): Conditions to be met for a product to have free shipping. @@ -946,6 +1078,417 @@ class `__ programs. """ + class CarrierPriceOption(proto.Enum): + r"""Possible carrier where price is coming from. + + Values: + CARRIER_PRICE_OPTION_UNSPECIFIED (0): + Carrier price option is unspecified. + AUSTRALIA_POST_REGULAR (1): + Australia Post Regular shipping service. + AUSTRALIA_POST_EXPRESS (2): + Australia Post Express shipping service. + AUSTRALIA_POST_REGULAR_S (3): + Australia Post Regular Small shipping + service. + AUSTRALIA_POST_REGULAR_M (4): + Australia Post Regular Medium shipping + service. + AUSTRALIA_POST_REGULAR_L (5): + Australia Post Regular Large shipping + service. + AUSTRALIA_POST_REGULAR_XL (6): + Australia Post Regular XL shipping service. + AUSTRALIA_POST_EXPRESS_S (7): + Australia Post Express Small shipping + service. + AUSTRALIA_POST_EXPRESS_M (8): + Australia Post Express Medium shipping + service. + AUSTRALIA_POST_EXPRESS_L (9): + Australia Post Express Large shipping + service. + AUSTRALIA_POST_EXPRESS_XL (10): + Australia Post Express XL shipping service. + TNT_ROAD_EXPRESS (11): + TNT Road Express shipping service. + TNT_OVERNIGHT_EXPRESS (12): + TNT Overnight Express shipping service. + TOLL_ROAD_DELIVERY (13): + Toll Road Delivery shipping service. + TOLL_OVERNIGHT_PRIORITY (14): + Toll Overnight Priority shipping service. + DHL_PAKET (15): + DHL Paket shipping service. + DHL_PACKCHEN (16): + DHL Packchen shipping service. + DPD_EXPRESS_12 (17): + DPD Express 12 shipping service. + DPD_EXPRESS (18): + DPD Express shipping service. + DPD_CLASSIC_PARCEL (19): + DPD Classic Parcel shipping service. + HERMES_PACKCHEN (20): + Hermes Packchen shipping service. + HERMES_PAKETKLASSE_S (21): + Hermes Paketklasse S shipping service. + HERMES_PAKETKLASSE_M (22): + Hermes Paketklasse M shipping service. 
+ HERMES_PAKETKLASSE_L (23): + Hermes Paketklasse L shipping service. + UPS_EXPRESS (24): + UPS Express shipping service. + UPS_EXPRESS_SAVER (25): + UPS Express Saver shipping service. + UPS_EXPRESS_STANDARD (26): + UPS Express Standard shipping service. + DHL_EXPRESS (27): + DHL Express shipping service. + DHL_EXPRESS_12 (28): + DHL Express 12 shipping service. + DPD_NEXT_DAY (29): + DPD Next Day shipping service. + DPD_STANDARD_NEXT_DAY (30): + DPD Standard Next Day shipping service. + DPD_STANDARD_TWO_DAY (31): + DPD Standard Two Day shipping service. + RMG_1ST_CLASS_SMALL (32): + RMG 1st Class Small shipping service. + RMG_1ST_CLASS_MEDIUM (33): + RMG 1st Class Medium shipping service. + RMG_2ND_CLASS_SMALL (34): + RMG 2nd Class Small shipping service. + RMG_2ND_CLASS_MEDIUM (35): + RMG 2nd Class Medium shipping service. + TNT_EXPRESS (36): + TNT Express shipping service. + TNT_EXPRESS_10 (37): + TNT Express 10 shipping service. + TNT_EXPRESS_12 (38): + TNT Express 12 shipping service. + YODEL_B2C_48HR (39): + Yodel B2C 48HR shipping service. + YODEL_B2C_72HR (40): + Yodel B2C 72HR shipping service. + YODEL_B2C_PACKET (41): + Yodel B2C Packet shipping service. + FEDEX_GROUND (42): + FedEx Ground shipping service. + FEDEX_HOME_DELIVERY (43): + FedEx Home Delivery shipping service. + FEDEX_EXPRESS_SAVER (44): + FedEx Express Saver shipping service. + FEDEX_FIRST_OVERNIGHT (45): + FedEx First Overnight shipping service. + FEDEX_PRIORITY_OVERNIGHT (46): + FedEx Priority Overnight shipping service. + FEDEX_STANDARD_OVERNIGHT (47): + FedEx Standard Overnight shipping service. + FEDEX_2DAY (48): + FedEx 2Day shipping service. + UPS_STANDARD (49): + UPS Standard shipping service. + UPS_2ND_DAY_AIR (50): + UPS 2nd Day Air shipping service. + UPS_2ND_DAY_AM (51): + UPS 2nd Day AM shipping service. + UPS_3_DAY_SELECT (52): + UPS 3 Day Select shipping service. + UPS_GROUND (53): + UPS Ground shipping service. + UPS_NEXT_DAY_AIR (54): + UPS Next Day Air shipping service. 
+ UPS_NEXT_DAY_AIR_EARLY_AM (55): + UPS Next Day Air Early AM shipping service. + UPS_NEXT_DAY_AIR_SAVER (56): + UPS Next Day Air Saver shipping service. + USPS_PRIORITY_MAIL_EXPRESS (57): + USPS Priority Mail Express shipping service. + USPS_MEDIA_MAIL (58): + USPS Media Mail shipping service. + USPS_GROUND_ADVANTAGE_RETAIL (59): + USPS Ground Advantage Retail shipping + service. + USPS_PRIORITY_MAIL (60): + USPS Priority Mail shipping service. + USPS_GROUND_ADVANTAGE_COMMERCIAL (61): + USPS Ground Advantage Commercial shipping + service. + """ + CARRIER_PRICE_OPTION_UNSPECIFIED = 0 + AUSTRALIA_POST_REGULAR = 1 + AUSTRALIA_POST_EXPRESS = 2 + AUSTRALIA_POST_REGULAR_S = 3 + AUSTRALIA_POST_REGULAR_M = 4 + AUSTRALIA_POST_REGULAR_L = 5 + AUSTRALIA_POST_REGULAR_XL = 6 + AUSTRALIA_POST_EXPRESS_S = 7 + AUSTRALIA_POST_EXPRESS_M = 8 + AUSTRALIA_POST_EXPRESS_L = 9 + AUSTRALIA_POST_EXPRESS_XL = 10 + TNT_ROAD_EXPRESS = 11 + TNT_OVERNIGHT_EXPRESS = 12 + TOLL_ROAD_DELIVERY = 13 + TOLL_OVERNIGHT_PRIORITY = 14 + DHL_PAKET = 15 + DHL_PACKCHEN = 16 + DPD_EXPRESS_12 = 17 + DPD_EXPRESS = 18 + DPD_CLASSIC_PARCEL = 19 + HERMES_PACKCHEN = 20 + HERMES_PAKETKLASSE_S = 21 + HERMES_PAKETKLASSE_M = 22 + HERMES_PAKETKLASSE_L = 23 + UPS_EXPRESS = 24 + UPS_EXPRESS_SAVER = 25 + UPS_EXPRESS_STANDARD = 26 + DHL_EXPRESS = 27 + DHL_EXPRESS_12 = 28 + DPD_NEXT_DAY = 29 + DPD_STANDARD_NEXT_DAY = 30 + DPD_STANDARD_TWO_DAY = 31 + RMG_1ST_CLASS_SMALL = 32 + RMG_1ST_CLASS_MEDIUM = 33 + RMG_2ND_CLASS_SMALL = 34 + RMG_2ND_CLASS_MEDIUM = 35 + TNT_EXPRESS = 36 + TNT_EXPRESS_10 = 37 + TNT_EXPRESS_12 = 38 + YODEL_B2C_48HR = 39 + YODEL_B2C_72HR = 40 + YODEL_B2C_PACKET = 41 + FEDEX_GROUND = 42 + FEDEX_HOME_DELIVERY = 43 + FEDEX_EXPRESS_SAVER = 44 + FEDEX_FIRST_OVERNIGHT = 45 + FEDEX_PRIORITY_OVERNIGHT = 46 + FEDEX_STANDARD_OVERNIGHT = 47 + FEDEX_2DAY = 48 + UPS_STANDARD = 49 + UPS_2ND_DAY_AIR = 50 + UPS_2ND_DAY_AM = 51 + UPS_3_DAY_SELECT = 52 + UPS_GROUND = 53 + UPS_NEXT_DAY_AIR = 54 + UPS_NEXT_DAY_AIR_EARLY_AM = 
55 + UPS_NEXT_DAY_AIR_SAVER = 56 + USPS_PRIORITY_MAIL_EXPRESS = 57 + USPS_MEDIA_MAIL = 58 + USPS_GROUND_ADVANTAGE_RETAIL = 59 + USPS_PRIORITY_MAIL = 60 + USPS_GROUND_ADVANTAGE_COMMERCIAL = 61 + + class CarrierShipping(proto.Message): + r"""Carrier-based shipping configuration. Allows for setting + shipping speed or shipping cost based on a carrier's provided + info. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + country (str): + The `CLDR territory + code `__ + of the country to which an item will ship. + + This field is a member of `oneof`_ ``_country``. + region (str): + The geographic region to which a shipping rate applies. See + `region `__ + for more information. + + This field is a member of `oneof`_ ``_region``. + postal_code (str): + The postal code range that the shipping rate applies to, + represented by a postal code (eg. ``94043``), a postal code + prefix followed by a \* wildcard (eg. ``94*``), a range + between two postal codes (eg. ``94043-98033``) or two postal + code prefixes of equal length (eg. ``94*-98*``). + + This field is a member of `oneof`_ ``_postal_code``. + origin_postal_code (str): + The source location postal code from which + this offer ships. Represented only by a + full-length postal code. + + This field is a member of `oneof`_ ``_origin_postal_code``. + flat_price (google.shopping.type.types.Price): + Fixed shipping price, represented as a number with currency. + Cannot be set together with + [carrierPrice][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_price] + or its adjustments + ([carrierPriceFlatAdjustment][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_price_flat_adjustment], + [carrierPricePercentageAdjustment][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_price_percentage_adjustment]). + + This field is a member of `oneof`_ ``_flat_price``. 
+ carrier_price (google.shopping.merchant_products_v1.types.ProductAttributes.CarrierPriceOption): + Selected carrier to calculate the shipping price from. + Select a carrier from the `available carriers + list `__, + for example ``AUSTRALIA_POST_REGULAR``. Price will be + calculated by this selected carrier, the location expressed + in + [originPostalCode][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.origin_postal_code], + along with the user location to determine the accurate + shipping price. Carrier is represented by a carrier service + name or a carrier service ID. Cannot be set together with + [flatPrice][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.flat_price]. + + This field is a member of `oneof`_ ``_carrier_price``. + carrier_price_flat_adjustment (google.shopping.type.types.Price): + A flat adjustment on the carrier price. Can be either + positive or negative. Cannot be zero. Requires + ``carrier_price`` to be present. Cannot be set together with + [flatPrice][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.flat_price] + and + [carrierPricePercentageAdjustment][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_price_percentage_adjustment]. + + This field is a member of `oneof`_ ``_carrier_price_flat_adjustment``. + carrier_price_percentage_adjustment (float): + A percentual adjustment on the carrier price. Can be either + positive or negative. Cannot be zero. Requires + ``carrier_price`` to be present. Cannot be set together with + [flatPrice][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.flat_price] + and + [carrierPriceFlatAdjustment][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_price_flat_adjustment]. + + This field is a member of `oneof`_ ``_carrier_price_percentage_adjustment``. 
+ min_handling_time (int): + Minimum handling time (inclusive) between when the order is + received and shipped in business days. 0 means that the + order is shipped on the same day as it is received if it + happens before the cut-off time. + [minHandlingTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.min_handling_time] + can only be set if + [maxHandlingTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.max_handling_time] + is also set. + + This field is a member of `oneof`_ ``_min_handling_time``. + max_handling_time (int): + Maximum handling time (inclusive) between when the order is + received and shipped in business days. 0 means that the + order is shipped on the same day as it is received if it + happens before the cut-off time. Both + [maxHandlingTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.max_handling_time] + and + [fixedMaxTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.fixed_max_transit_time] + or + [carrierTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_transit_time] + are required if providing shipping speeds. + + This field is a member of `oneof`_ ``_max_handling_time``. + fixed_min_transit_time (int): + Minimum transit time (inclusive) between when the order has + shipped and when it is delivered in business days. 0 means + that the order is delivered on the same day as it ships. + [fixedMinTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.fixed_min_transit_time] + can only be set if + [fixedMaxTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.fixed_max_transit_time] + is set. Cannot be set if + [carrierTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_transit_time] + is present. + + This field is a member of `oneof`_ ``_fixed_min_transit_time``. 
+ fixed_max_transit_time (int): + Maximum transit time (inclusive) between when the order has + shipped and when it is delivered in business days. 0 means + that the order is delivered on the same day as it ships. + Needs to be provided together with + [maxHandlingTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.max_handling_time]. + Cannot be set if + [carrierTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.carrier_transit_time] + is present. + + This field is a member of `oneof`_ ``_fixed_max_transit_time``. + carrier_transit_time (google.shopping.merchant_products_v1.types.CarrierTransitTimeOption): + Selected carrier to calculate the shipping speed from. + Select a carrier from the `available carriers + list `__, + for example ``AUSTRALIA_POST_REGULAR``. Speed will be + calculated by this selected carrier, the location expressed + in + [originPostalCode][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.origin_postal_code], + along with the user location to determine the accurate + delivery speed. Carrier is represented by a carrier service + name or a carrier service ID. Cannot be set together with + [fixedMaxTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.fixed_max_transit_time] + or + [fixedMinTransitTime][google.shopping.merchant.products.v1.ProductAttributes.CarrierShipping.fixed_min_transit_time]. + + This field is a member of `oneof`_ ``_carrier_transit_time``. 
+ """ + + country: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + postal_code: str = proto.Field( + proto.STRING, + number=3, + optional=True, + ) + origin_postal_code: str = proto.Field( + proto.STRING, + number=4, + optional=True, + ) + flat_price: types.Price = proto.Field( + proto.MESSAGE, + number=5, + optional=True, + message=types.Price, + ) + carrier_price: "ProductAttributes.CarrierPriceOption" = proto.Field( + proto.ENUM, + number=6, + optional=True, + enum="ProductAttributes.CarrierPriceOption", + ) + carrier_price_flat_adjustment: types.Price = proto.Field( + proto.MESSAGE, + number=7, + optional=True, + message=types.Price, + ) + carrier_price_percentage_adjustment: float = proto.Field( + proto.DOUBLE, + number=8, + optional=True, + ) + min_handling_time: int = proto.Field( + proto.INT64, + number=9, + optional=True, + ) + max_handling_time: int = proto.Field( + proto.INT64, + number=10, + optional=True, + ) + fixed_min_transit_time: int = proto.Field( + proto.INT64, + number=11, + optional=True, + ) + fixed_max_transit_time: int = proto.Field( + proto.INT64, + number=12, + optional=True, + ) + carrier_transit_time: "CarrierTransitTimeOption" = proto.Field( + proto.ENUM, + number=13, + optional=True, + enum="CarrierTransitTimeOption", + ) + identifier_exists: bool = proto.Field( proto.BOOL, number=4, @@ -1147,6 +1690,11 @@ class `__ number=39, message="Shipping", ) + carrier_shipping: MutableSequence[CarrierShipping] = proto.RepeatedField( + proto.MESSAGE, + number=142, + message=CarrierShipping, + ) free_shipping_threshold: MutableSequence[ "FreeShippingThreshold" ] = proto.RepeatedField( diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py index 
f124fcb45f65..d94e4154bb99 100644 --- a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py @@ -3142,6 +3142,23 @@ def test_insert_product_input_rest_call_success(request_type): "max_transit_time": 1720, } ], + "carrier_shipping": [ + { + "country": "country_value", + "region": "region_value", + "postal_code": "postal_code_value", + "origin_postal_code": "origin_postal_code_value", + "flat_price": {}, + "carrier_price": 1, + "carrier_price_flat_adjustment": {}, + "carrier_price_percentage_adjustment": 0.37010000000000004, + "min_handling_time": 1782, + "max_handling_time": 1784, + "fixed_min_transit_time": 2341, + "fixed_max_transit_time": 2343, + "carrier_transit_time": 1, + } + ], "free_shipping_threshold": [ {"country": "country_value", "price_threshold": {}} ], @@ -3534,6 +3551,23 @@ def test_update_product_input_rest_call_success(request_type): "max_transit_time": 1720, } ], + "carrier_shipping": [ + { + "country": "country_value", + "region": "region_value", + "postal_code": "postal_code_value", + "origin_postal_code": "origin_postal_code_value", + "flat_price": {}, + "carrier_price": 1, + "carrier_price_flat_adjustment": {}, + "carrier_price_percentage_adjustment": 0.37010000000000004, + "min_handling_time": 1782, + "max_handling_time": 1784, + "fixed_min_transit_time": 2341, + "fixed_max_transit_time": 2343, + "carrier_transit_time": 1, + } + ], "free_shipping_threshold": [ {"country": "country_value", "price_threshold": {}} ],